ExynosCameraHWInterface2.cpp revision 8b0e51f2eb790ce3a45a23ccef0cddd7c2f88726
/*
**
** Copyright 2008, The Android Open Source Project
** Copyright 2012, Samsung Electronics Co. LTD
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
**     http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/

/*!
 * \file      ExynosCameraHWInterface2.cpp
 * \brief     source file for Android Camera API 2.0 HAL
 * \author    Sungjoong Kang(sj3.kang@samsung.com)
 * \date      2012/07/10
 *
 * <b>Revision History: </b>
 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
 *   Initial Release
 *
 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
 *   2nd Release
 *
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "ExynosCameraHAL2"
#include <utils/Log.h>

#include "ExynosCameraHWInterface2.h"
#include "exynos_format.h"



namespace android {

45
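/* Debug helper: writes |size| bytes starting at |buf| to the file |fname| (used to dump post-view images). */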
46void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
47{
48    int nw;
49    int cnt = 0;
50    uint32_t written = 0;
51
52    ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
53    int fd = open(fname, O_RDWR | O_CREAT, 0644);
54    if (fd < 0) {
55        ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
56        return;
57    }
58
59    ALOGV("writing %d bytes to file [%s]", size, fname);
60    while (written < size) {
61        nw = ::write(fd, buf + written, size - written);
62        if (nw < 0) {
63            ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
64            break;
65        }
66        written += nw;
67        cnt++;
68    }
69    ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
70    ::close(fd);
71}
72
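/* Returns the number of bits per pixel for the given V4L2 pixel format, or 0 if the format is not handled. */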
73int get_pixel_depth(uint32_t fmt)
74{
75    int depth = 0;
76
77    switch (fmt) {
78    case V4L2_PIX_FMT_JPEG:
79        depth = 8;
80        break;
81
82    case V4L2_PIX_FMT_NV12:
83    case V4L2_PIX_FMT_NV21:
84    case V4L2_PIX_FMT_YUV420:
85    case V4L2_PIX_FMT_YVU420M:
86    case V4L2_PIX_FMT_NV12M:
87    case V4L2_PIX_FMT_NV12MT:
88        depth = 12;
89        break;
90
91    case V4L2_PIX_FMT_RGB565:
92    case V4L2_PIX_FMT_YUYV:
93    case V4L2_PIX_FMT_YVYU:
94    case V4L2_PIX_FMT_UYVY:
95    case V4L2_PIX_FMT_VYUY:
96    case V4L2_PIX_FMT_NV16:
97    case V4L2_PIX_FMT_NV61:
98    case V4L2_PIX_FMT_YUV422P:
99    case V4L2_PIX_FMT_SBGGR10:
100    case V4L2_PIX_FMT_SBGGR12:
101    case V4L2_PIX_FMT_SBGGR16:
102        depth = 16;
103        break;
104
105    case V4L2_PIX_FMT_RGB32:
106        depth = 32;
107        break;
108    default:
109        ALOGE("Get depth failed(format : %d)", fmt);
110        break;
111    }
112
113    return depth;
114}
115
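/* Applies the node's width, height and pixel format to the V4L2 device via VIDIOC_S_FMT (multi-planar API). */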
116int cam_int_s_fmt(node_info_t *node)
117{
118    struct v4l2_format v4l2_fmt;
119    unsigned int framesize;
120    int ret;
121
122    memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
123
124    v4l2_fmt.type = node->type;
125    framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
126
127    if (node->planes >= 1) {
128        v4l2_fmt.fmt.pix_mp.width       = node->width;
129        v4l2_fmt.fmt.pix_mp.height      = node->height;
130        v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
131        v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
    } else {
        ALOGE("%s: S_FMT: invalid number of planes (%d)", __FUNCTION__, node->planes);
    }
135
136    /* Set up for capture */
137    ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
138
139    if (ret < 0)
140        ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
141
142
143    return ret;
144}
145
146int cam_int_reqbufs(node_info_t *node)
147{
    struct v4l2_requestbuffers req;
    int ret;

    /* Zero the request so reserved fields are not passed to the driver uninitialized. */
    memset(&req, 0, sizeof(req));
    req.count = node->buffers;
    req.type = node->type;
    req.memory = node->memory;

    ret = exynos_v4l2_reqbufs(node->fd, &req);
156
157    if (ret < 0)
158        ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
159
160    return req.count;
161}
162
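/* Queues buffer |index| of |node| with VIDIOC_QBUF, passing each plane's exported buffer fd and size. */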
163int cam_int_qbuf(node_info_t *node, int index)
164{
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    int i;
    int ret = 0;

    /* Zero the descriptors so fields this HAL does not set are not handed to the driver uninitialized. */
    memset(&v4l2_buf, 0, sizeof(v4l2_buf));
    memset(planes, 0, sizeof(planes));

    v4l2_buf.m.planes   = planes;
    v4l2_buf.type       = node->type;
    v4l2_buf.memory     = node->memory;
    v4l2_buf.index      = index;
    v4l2_buf.length     = node->planes;

    for (i = 0; i < node->planes; i++) {
        v4l2_buf.m.planes[i].m.fd   = node->buffer[index].fd.extFd[i];
        v4l2_buf.m.planes[i].length = node->buffer[index].size.extS[i];
    }
180
181    ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
182
183    if (ret < 0)
184        ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
185
186    return ret;
187}
188
189int cam_int_streamon(node_info_t *node)
190{
191    enum v4l2_buf_type type = node->type;
192    int ret;
193
194
195    ret = exynos_v4l2_streamon(node->fd, type);
196
197    if (ret < 0)
198        ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
199
200    ALOGV("On streaming I/O... ... fd(%d)", node->fd);
201
202    return ret;
203}
204
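/* Note: unlike cam_int_streamon(), this helper hardcodes the capture (mplane) buffer type;
 * output (ISP) nodes are stopped with isp_int_streamoff() below. */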
205int cam_int_streamoff(node_info_t *node)
206{
207    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
208    int ret;
209
210
211    ALOGV("Off streaming I/O... fd(%d)", node->fd);
212    ret = exynos_v4l2_streamoff(node->fd, type);
213
214    if (ret < 0)
215        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
216
217    return ret;
218}
219
220int isp_int_streamoff(node_info_t *node)
221{
222    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
223    int ret;
224
225    ALOGV("Off streaming I/O... fd(%d)", node->fd);
226    ret = exynos_v4l2_streamoff(node->fd, type);
227
228    if (ret < 0)
229        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
230
231    return ret;
232}
233
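/* Dequeues the next filled buffer from |node| with VIDIOC_DQBUF and returns its index. */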
234int cam_int_dqbuf(node_info_t *node)
235{
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    int ret;

    /* Zero the descriptors before VIDIOC_DQBUF so the driver does not see stale stack data. */
    memset(&v4l2_buf, 0, sizeof(v4l2_buf));
    memset(planes, 0, sizeof(planes));

    v4l2_buf.type       = node->type;
    v4l2_buf.memory     = node->memory;
    v4l2_buf.m.planes   = planes;
    v4l2_buf.length     = node->planes;
244
245    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
246    if (ret < 0)
247        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
248
249    return v4l2_buf.index;
250}
251
252int cam_int_dqbuf(node_info_t *node, int num_plane)
253{
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    int ret;

    /* Zero the descriptors before VIDIOC_DQBUF so the driver does not see stale stack data. */
    memset(&v4l2_buf, 0, sizeof(v4l2_buf));
    memset(planes, 0, sizeof(planes));

    v4l2_buf.type       = node->type;
    v4l2_buf.memory     = node->memory;
    v4l2_buf.m.planes   = planes;
    v4l2_buf.length     = num_plane;
262
263    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
264    if (ret < 0)
265        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
266
267    return v4l2_buf.index;
268}
269
270int cam_int_s_input(node_info_t *node, int index)
271{
272    int ret;
273
274    ret = exynos_v4l2_s_input(node->fd, index);
275    if (ret < 0)
276        ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
277
278    return ret;
279}
280
281
282gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
283
284RequestManager::RequestManager(SignalDrivenThread* main_thread):
285    m_numOfEntries(0),
286    m_entryInsertionIndex(-1),
287    m_entryProcessingIndex(-1),
288    m_entryFrameOutputIndex(-1),
289    m_lastAeMode(0),
290    m_lastAaMode(0),
291    m_lastAwbMode(0),
292    m_lastAeComp(0),
    m_frameIndex(-1),
    m_completedIndex(-1)    // ensure the index logged in PrepareFrame() is never read uninitialized
294{
295    m_metadataConverter = new MetadataConverter;
296    m_mainThread = main_thread;
297    for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
298        memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
299        entries[i].internal_shot.shot.ctl.request.frameCount = -1;
300    }
301    m_sensorPipelineSkipCnt = 0;
302    return;
303}
304
305RequestManager::~RequestManager()
306{
307    ALOGV("%s", __FUNCTION__);
308    if (m_metadataConverter != NULL) {
309        delete m_metadataConverter;
310        m_metadataConverter = NULL;
311    }
312
313    return;
314}
315
316int RequestManager::GetNumEntries()
317{
318    return m_numOfEntries;
319}
320
321void RequestManager::SetDefaultParameters(int cropX)
322{
323    m_cropX = cropX;
324}
325
326bool RequestManager::IsRequestQueueFull()
327{
328    Mutex::Autolock lock(m_requestMutex);
329    if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
330        return true;
331    else
332        return false;
333}
334
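/* Inserts a new service request into the circular entry table and converts its metadata into the
 * internal camera2_shot_ext representation. */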
335void RequestManager::RegisterRequest(camera_metadata_t * new_request)
336{
337    ALOGV("DEBUG(%s):", __FUNCTION__);
338
339    Mutex::Autolock lock(m_requestMutex);
340
341    request_manager_entry * newEntry = NULL;
342    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
343    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex,m_numOfEntries );
344
345
346    newEntry = &(entries[newInsertionIndex]);
347
348    if (newEntry->status!=EMPTY) {
349        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
350        return;
351    }
352    newEntry->status = REGISTERED;
353    newEntry->original_request = new_request;
354    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
355    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
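    /* outputStreams[15] is used by this HAL to carry the number of valid output stream IDs (see DumpInfoWithIndex()). */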
356    newEntry->output_stream_count = newEntry->internal_shot.shot.ctl.request.outputStreams[15];
357
358    m_numOfEntries++;
359    m_entryInsertionIndex = newInsertionIndex;
360
361
362    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
363    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
364}
365
366void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
367{
368    ALOGV("DEBUG(%s):", __FUNCTION__);
369    int frame_index;
370    request_manager_entry * currentEntry;
371
372    Mutex::Autolock lock(m_requestMutex);
373
374    frame_index = GetFrameIndex();
375    currentEntry =  &(entries[frame_index]);
376    if (currentEntry->status != CAPTURED) {
377        ALOGV("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__
378        , m_entryProcessingIndex, m_entryFrameOutputIndex,(int)(currentEntry->status));
379        return;
380    }
381    if (deregistered_request)  *deregistered_request = currentEntry->original_request;
382
383    currentEntry->status = EMPTY;
384    currentEntry->original_request = NULL;
385    memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
386    currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
387    currentEntry->output_stream_count = 0;
388    currentEntry->dynamic_meta_vaild = false;
389    m_numOfEntries--;
390    ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
391     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
392
393    return;
394}
395
396bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
397                camera_metadata_t ** prepared_frame, int afState)
398{
399    ALOGV("DEBUG(%s):", __FUNCTION__);
400    Mutex::Autolock lock(m_requestMutex);
401    status_t res = NO_ERROR;
402    int tempFrameOutputIndex = GetFrameIndex();
403    request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
404    ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
405        m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
406
407    if (currentEntry->status != CAPTURED) {
408        ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
409
410        return false;
411    }
412    m_entryFrameOutputIndex = tempFrameOutputIndex;
413    m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 20, 500); //estimated
414    add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
415    res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
416                m_tempFrameMetadata);
417    if (res!=NO_ERROR) {
418        ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
419        return false;
420    }
421    *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
422    *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
423    *prepared_frame = m_tempFrameMetadata;
424    ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d)", m_entryFrameOutputIndex,
425        currentEntry->internal_shot.shot.ctl.request.frameCount);
426    // Dump();
427    return true;
428}
429
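/* Takes the next REGISTERED entry, marks it REQUESTED, and fills the camera2_shot_ext in the sensor
 * buffer's metadata plane (plane 1) with that request's controls and per-stream flags.
 * Returns the entry index, or -1 if no request can be processed. */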
430int RequestManager::MarkProcessingRequest(ExynosBuffer* buf, int *afMode)
431{
432
433    Mutex::Autolock lock(m_requestMutex);
434    struct camera2_shot_ext * shot_ext;
435    struct camera2_shot_ext * request_shot;
436    int targetStreamIndex = 0;
437    request_manager_entry * newEntry = NULL;
438    static int count = 0;
439
440    if (m_numOfEntries == 0)  {
441        ALOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
442        return -1;
443    }
444
445    if ((m_entryProcessingIndex == m_entryInsertionIndex)
446        && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
447        ALOGD("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
448         m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
449        return -1;
450    }
451
452    int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
453    ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);
454
455    newEntry = &(entries[newProcessingIndex]);
456    request_shot = &(newEntry->internal_shot);
457    *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
458    if (newEntry->status != REGISTERED) {
459        ALOGD("DEBUG(%s)(%d): Circular buffer abnormal ", __FUNCTION__, newProcessingIndex);
460        return -1;
461    }
462
463    newEntry->status = REQUESTED;
464
465    shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
466
467    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
468    shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
469    shot_ext->request_sensor = 1;
470    shot_ext->dis_bypass = 1;
471    shot_ext->dnr_bypass = 1;
472    shot_ext->fd_bypass = 1;
473    shot_ext->setfile = 0;
474
475    for (int i = 0; i < newEntry->output_stream_count; i++) {
476        targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];
477
478        if (targetStreamIndex==0) {
479            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
480            shot_ext->request_scp = 1;
            if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
482                shot_ext->fd_bypass = 0;
483        }
484        else if (targetStreamIndex == 1) {
485            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
486            shot_ext->request_scc = 1;
487        }
488        else if (targetStreamIndex == 2) {
489            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (record)", __FUNCTION__, i);
490            shot_ext->request_scp = 1;
491            shot_ext->shot.ctl.request.outputStreams[2] = 1;
492            if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
493                shot_ext->fd_bypass = 0;
494        }
495        else if (targetStreamIndex == 3) {
496            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (previewCb)", __FUNCTION__, i);
497            shot_ext->request_scp = 1;
498            shot_ext->shot.ctl.request.outputStreams[3] = 1;
499        }
500        else {
501            ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
502        }
503    }
504
    if (count == 0)
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
    else
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;
509
510    count++;
511    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
512    shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
513    shot_ext->shot.magicNumber = 0x23456789;
514    shot_ext->shot.ctl.sensor.exposureTime = 0;
515    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
516    shot_ext->shot.ctl.sensor.sensitivity = 0;
517
518
519    shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
520    shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
521    shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];
522
523    m_entryProcessingIndex = newProcessingIndex;
524    return newProcessingIndex;
525}
526
527void RequestManager::NotifyStreamOutput(int frameCnt, int stream_id)
528{
529    int index;
530
531    ALOGV("DEBUG(%s): frameCnt(%d), stream_id(%d)", __FUNCTION__, frameCnt, stream_id);
532
533    index = FindEntryIndexByFrameCnt(frameCnt);
534    if (index == -1) {
535        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
536        return;
537    }
538    ALOGV("DEBUG(%s): frameCnt(%d), stream_id(%d) last cnt (%d)", __FUNCTION__, frameCnt, stream_id,  entries[index].output_stream_count);
539
540    entries[index].output_stream_count--;  //TODO : match stream id also
541    CheckCompleted(index);
542    return;
543}
544
545void RequestManager::CheckCompleted(int index)
546{
547    ALOGV("DEBUG(%s): reqIndex(%d) current Count(%d)", __FUNCTION__, index, entries[index].output_stream_count);
548    SetFrameIndex(index);
549    m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
550    return;
551}
552
553void RequestManager::SetFrameIndex(int index)
554{
555    Mutex::Autolock lock(m_requestMutex);
556    m_frameIndex = index;
557}
558
559int RequestManager::GetFrameIndex()
560{
561    return m_frameIndex;
562}
563
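/* Copies the dynamic (dm) metadata reported back by the ISP into the matching CAPTURED entry,
 * preserving the sensor timestamp registered earlier, and then signals completion. */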
564void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
565{
566    int index;
567    struct camera2_shot_ext * request_shot;
568    nsecs_t timeStamp;
569    int i;
570
571    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
572
573    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
574        if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
575            && (entries[i].status == CAPTURED))
576            break;
577    }
578
579    if (i == NUM_MAX_REQUEST_MGR_ENTRY){
580        ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
581        return;
582    }
583
584    request_manager_entry * newEntry = &(entries[i]);
585    request_shot = &(newEntry->internal_shot);
586
587    newEntry->dynamic_meta_vaild = true;
588    timeStamp = request_shot->shot.dm.sensor.timeStamp;
589    memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
590    request_shot->shot.dm.sensor.timeStamp = timeStamp;
591    CheckCompleted(i);
592}
593
594void RequestManager::DumpInfoWithIndex(int index)
595{
596    struct camera2_shot_ext * currMetadata = &(entries[index].internal_shot);
597
598    ALOGV("####   frameCount(%d) exposureTime(%lld) ISO(%d)",
599        currMetadata->shot.ctl.request.frameCount,
600        currMetadata->shot.ctl.sensor.exposureTime,
601        currMetadata->shot.ctl.sensor.sensitivity);
602    if (currMetadata->shot.ctl.request.outputStreams[15] == 0)
603        ALOGV("####   No output stream selected");
604    else if (currMetadata->shot.ctl.request.outputStreams[15] == 1)
605        ALOGV("####   OutputStreamId : %d", currMetadata->shot.ctl.request.outputStreams[0]);
606    else if (currMetadata->shot.ctl.request.outputStreams[15] == 2)
607        ALOGV("####   OutputStreamId : %d, %d", currMetadata->shot.ctl.request.outputStreams[0],
608            currMetadata->shot.ctl.request.outputStreams[1]);
609    else
610        ALOGV("####   OutputStream num (%d) abnormal ", currMetadata->shot.ctl.request.outputStreams[15]);
611}
612
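/* Rebuilds the shot_ext that will be sent to the ISP for frameCnt: copies the stored request controls,
 * resets the per-frame request/bypass flags, and re-sends 3A modes only when they differ from the
 * values applied last time. */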
613void    RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt)
614{
615    int index, targetStreamIndex;
616    struct camera2_shot_ext * request_shot;
617
618    ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
619    if (frameCnt < 0)
620        return;
621
622    index = FindEntryIndexByFrameCnt(frameCnt);
623    if (index == -1) {
624        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
625        return;
626    }
627
628    request_manager_entry * newEntry = &(entries[index]);
629    request_shot = &(newEntry->internal_shot);
630    memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
631    shot_ext->request_sensor = 1;
632    shot_ext->dis_bypass = 1;
633    shot_ext->dnr_bypass = 1;
634    shot_ext->fd_bypass = 1;
635    shot_ext->setfile = 0;
636
637    shot_ext->request_scc = 0;
638    shot_ext->request_scp = 0;
639
640    shot_ext->shot.ctl.request.outputStreams[0] = 0;
641    shot_ext->shot.ctl.request.outputStreams[1] = 0;
642    shot_ext->shot.ctl.request.outputStreams[2] = 0;
643    shot_ext->shot.ctl.request.outputStreams[3] = 0;
644
645    shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
646    shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
647    shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];
648
649    if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
650        shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
651    }
652    else {
653        shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
654        m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
655    }
656    if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
657        shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
658    }
659    else {
660        shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
661        m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
662    }
663    if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
664        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
665    }
666    else {
667        shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
668        m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
669    }
670    if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
671        shot_ext->shot.ctl.aa.aeExpCompensation = 0;
672    }
673    else {
674        shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
675        m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
676    }
677
678    shot_ext->shot.ctl.aa.afTrigger = 0;
679
680    for (int i = 0; i < newEntry->output_stream_count; i++) {
681       targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];
682
683        if (targetStreamIndex==0) {
684            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
685            shot_ext->request_scp = 1;
686            if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
687                shot_ext->fd_bypass = 0;
688        }
689        else if (targetStreamIndex == 1) {
690            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
691            shot_ext->request_scc = 1;
692            if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
693                shot_ext->fd_bypass = 0;
694        }
695        else if (targetStreamIndex == 2) {
696            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (record)", __FUNCTION__, i);
697            shot_ext->request_scp = 1;
698            shot_ext->shot.ctl.request.outputStreams[2] = 1;
699            shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
700            shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
701            if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
702                shot_ext->fd_bypass = 0;
703        }
704        else if (targetStreamIndex == 3) {
705            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (previewCb)", __FUNCTION__, i);
706            shot_ext->request_scp = 1;
707            shot_ext->shot.ctl.request.outputStreams[3] = 1;
708        }
709        else {
710            ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
711        }
712    }
713        ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
714        (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
715        (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
716        (int)(shot_ext->shot.ctl.aa.afMode));
717}
718
719int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
720{
721    for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
722        if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
723            return i;
724    }
725    return -1;
726}
727
728void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
729{
730    int index = FindEntryIndexByFrameCnt(frameCnt);
731    if (index == -1) {
732        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
733        return;
734    }
735
736    request_manager_entry * currentEntry = &(entries[index]);
737    currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
738    ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
739        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
740}
741
742uint64_t  RequestManager::GetTimestamp(int index)
743{
744    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
745        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
746        return 0;
747    }
748
749    request_manager_entry * currentEntry = &(entries[index]);
750    uint64_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
751    ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
752    return frameTime;
753}
754
755int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
756{
757    int i;
758
759    if (m_numOfEntries == 0) {
760        ALOGV("(%s): No Entry found", __FUNCTION__);
761        return -1;
762    }
763
764    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
765        if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
766            continue;
767
768        if (entries[i].status == REQUESTED) {
769            entries[i].status = CAPTURED;
770            return entries[i].internal_shot.shot.ctl.request.frameCount;
771        }
772
773    }
774
775    ALOGD("(%s): No Entry found", __FUNCTION__);
776
777    return -1;
778}
779
780void     RequestManager::SetInitialSkip(int count)
781{
782    ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
783    if (count > m_sensorPipelineSkipCnt)
784        m_sensorPipelineSkipCnt = count;
785}
786
787int     RequestManager::GetSkipCnt()
788{
789    ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
790    if (m_sensorPipelineSkipCnt == 0)
791        return m_sensorPipelineSkipCnt;
792    else
793        return --m_sensorPipelineSkipCnt;
794}
795
796void RequestManager::Dump(void)
797{
798    int i = 0;
799    request_manager_entry * currentEntry;
800    ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
801    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
802
803    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
804        currentEntry =  &(entries[i]);
805        ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%d outstream[1]-%d", i,
806        currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
807            currentEntry->output_stream_count,
808            currentEntry->internal_shot.shot.ctl.request.outputStreams[0],
809            currentEntry->internal_shot.shot.ctl.request.outputStreams[1]);
810    }
811}
812
813int     RequestManager::GetNextIndex(int index)
814{
815    index++;
816    if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
817        index = 0;
818
819    return index;
820}
821
822ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
823            m_requestQueueOps(NULL),
824            m_frameQueueOps(NULL),
825            m_callbackCookie(NULL),
826            m_numOfRemainingReqInSvc(0),
827            m_isRequestQueuePending(false),
828            m_isRequestQueueNull(true),
829            m_isSensorThreadOn(false),
830            m_isSensorStarted(false),
831            m_isIspStarted(false),
832            m_ionCameraClient(0),
833            m_initFlag1(false),
834            m_initFlag2(false),
835            m_scp_flushing(false),
836            m_closing(false),
837            m_recordingEnabled(false),
838            m_needsRecordBufferInit(false),
839            m_needsPreviewCbBufferInit(false),
840            lastFrameCnt(-1),
841            m_scp_closing(false),
842            m_scp_closed(false),
843            m_afState(HAL_AFSTATE_INACTIVE),
844            m_afMode(NO_CHANGE),
845            m_afMode2(NO_CHANGE),
846            m_IsAfModeUpdateRequired(false),
847            m_IsAfTriggerRequired(false),
848            m_IsAfLockRequired(false),
849            m_wideAspect(false),
850            m_afTriggerId(0),
851            m_afPendingTriggerId(0),
852            m_afModeWaitingCnt(0),
853            m_halDevice(dev),
854            m_need_streamoff(0),
855            m_nightCaptureCnt(0),
856            m_afFlashEnableFlg(false),
857            m_cameraId(cameraId),
858            m_thumbNailW(160),
859            m_thumbNailH(120)
860{
861    ALOGV("DEBUG(%s):", __FUNCTION__);
862    int ret = 0;
863    int res = 0;
864
865    m_exynosPictureCSC = NULL;
866    m_exynosVideoCSC = NULL;
867
868    if (!m_grallocHal) {
869        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
870        if (ret)
871            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
872    }
873
874    m_camera2 = camera;
875    m_ionCameraClient = createIonClient(m_ionCameraClient);
876    if(m_ionCameraClient == 0)
877        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
878
879
880    m_BayerManager = new BayerBufManager();
881    m_mainThread    = new MainThread(this);
882    *openInvalid = InitializeISPChain();
883    if (*openInvalid < 0) {
884        // clean process
885        // 1. close video nodes
886        // SCP
887        res = exynos_v4l2_close(m_fd_scp);
888        if (res != NO_ERROR ) {
889            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
890        }
891        // SCC
892        res = exynos_v4l2_close(m_camera_info.capture.fd);
893        if (res != NO_ERROR ) {
894            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
895        }
896        // Sensor
897        res = exynos_v4l2_close(m_camera_info.sensor.fd);
898        if (res != NO_ERROR ) {
899            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
900        }
901        // ISP
902        res = exynos_v4l2_close(m_camera_info.isp.fd);
903        if (res != NO_ERROR ) {
904            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
905        }
906    } else {
907        m_sensorThread  = new SensorThread(this);
908        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
909        ALOGV("DEBUG(%s): created sensorthread ################", __FUNCTION__);
910        m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
911        CSC_METHOD cscMethod = CSC_METHOD_HW;
912        m_exynosPictureCSC = csc_init(cscMethod);
913        if (m_exynosPictureCSC == NULL)
914            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
915        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
916
917        m_exynosVideoCSC = csc_init(cscMethod);
918        if (m_exynosVideoCSC == NULL)
919            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
920        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
921
922        m_setExifFixedAttribute();
923    }
924}
925
926ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
927{
928    ALOGV("%s: ENTER", __FUNCTION__);
929    this->release();
930    ALOGV("%s: EXIT", __FUNCTION__);
931}
932
933void ExynosCameraHWInterface2::release()
934{
935    int i, res;
936    ALOGD("%s: ENTER", __func__);
937    m_closing = true;
938
939    if (m_streamThreads[1] != NULL) {
940        m_streamThreads[1]->release();
941        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
942    }
943
944    if (m_streamThreads[0] != NULL) {
945        m_streamThreads[0]->release();
946        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
947    }
948
949    if (m_ispThread != NULL) {
950        m_ispThread->release();
951    }
952
953    if (m_sensorThread != NULL) {
954        m_sensorThread->release();
955    }
956
957    if (m_mainThread != NULL) {
958        m_mainThread->release();
959    }
960
961    if (m_exynosPictureCSC)
962        csc_deinit(m_exynosPictureCSC);
963    m_exynosPictureCSC = NULL;
964
965    if (m_exynosVideoCSC)
966        csc_deinit(m_exynosVideoCSC);
967    m_exynosVideoCSC = NULL;
968
    if (m_streamThreads[1] != NULL) {
        while (!m_streamThreads[1]->IsTerminated())
        {
            ALOGD("Waiting for stream thread 1 to terminate");
            usleep(100000);
        }
        m_streamThreads[1] = NULL;
    }

    if (m_streamThreads[0] != NULL) {
        while (!m_streamThreads[0]->IsTerminated())
        {
            ALOGD("Waiting for stream thread 0 to terminate");
            usleep(100000);
        }
        m_streamThreads[0] = NULL;
    }

    if (m_ispThread != NULL) {
        while (!m_ispThread->IsTerminated())
        {
            ALOGD("Waiting for ISP thread to terminate");
            usleep(100000);
        }
        m_ispThread = NULL;
    }

    if (m_sensorThread != NULL) {
        while (!m_sensorThread->IsTerminated())
        {
            ALOGD("Waiting for sensor thread to terminate");
            usleep(100000);
        }
        m_sensorThread = NULL;
    }

    if (m_mainThread != NULL) {
        while (!m_mainThread->IsTerminated())
        {
            ALOGD("Waiting for main thread to terminate");
            usleep(100000);
        }
        m_mainThread = NULL;
    }
1013
1014    if (m_requestManager != NULL) {
1015        delete m_requestManager;
1016        m_requestManager = NULL;
1017    }
1018
1019    if (m_BayerManager != NULL) {
1020        delete m_BayerManager;
1021        m_BayerManager = NULL;
1022    }
1023//    for(i = 0; i < m_camera_info.sensor.buffers; i++)
1024    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
1025        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1026
1027    for(i = 0; i < m_camera_info.capture.buffers; i++)
1028        freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1029
1030    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
1031    res = exynos_v4l2_close(m_camera_info.sensor.fd);
1032    if (res != NO_ERROR ) {
1033        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1034    }
1035
1036    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
1037    res = exynos_v4l2_close(m_camera_info.isp.fd);
1038    if (res != NO_ERROR ) {
1039        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1040    }
1041
1042    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
1043    res = exynos_v4l2_close(m_camera_info.capture.fd);
1044    if (res != NO_ERROR ) {
1045        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1046    }
1047
1048    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
1049    res = exynos_v4l2_close(m_fd_scp);
1050    if (res != NO_ERROR ) {
1051        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1052    }
1053    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
1054    deleteIonClient(m_ionCameraClient);
1055
1056    ALOGV("%s: EXIT", __func__);
1057}
1058
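/* Opens the sensor, ISP, ScalerC (capture) and ScalerP (preview) video nodes, allocates ION-backed
 * buffers for the sensor/ISP and capture paths, and starts streaming on the sensor and capture nodes. */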
1059int ExynosCameraHWInterface2::InitializeISPChain()
1060{
1061    char node_name[30];
1062    int fd = 0;
1063    int i;
1064    int ret = 0;
1065
1066    /* Open Sensor */
1067    memset(&node_name, 0x00, sizeof(char[30]));
1068    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
1069    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1070
1071    if (fd < 0) {
1072        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1073    }
1074    else {
1075        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1076    }
1077    m_camera_info.sensor.fd = fd;
1078
1079    /* Open ISP */
1080    memset(&node_name, 0x00, sizeof(char[30]));
1081    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
1082    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1083
1084    if (fd < 0) {
1085        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1086    }
1087    else {
1088        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1089    }
1090    m_camera_info.isp.fd = fd;
1091
1092    /* Open ScalerC */
1093    memset(&node_name, 0x00, sizeof(char[30]));
1094    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
1095    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1096
1097    if (fd < 0) {
1098        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1099    }
1100    else {
1101        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1102    }
1103    m_camera_info.capture.fd = fd;
1104
1105    /* Open ScalerP */
1106    memset(&node_name, 0x00, sizeof(char[30]));
1107    sprintf(node_name, "%s%d", NODE_PREFIX, 44);
1108    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1109    if (fd < 0) {
1110        ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1111    }
1112    else {
1113        ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1114    }
1115    m_fd_scp = fd;
1116
1117    if(m_cameraId == 0)
1118        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
1119    else
1120        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
1121
1122    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
1123    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
1124    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
1125
1126    m_camera_info.dummy_shot.dis_bypass = 1;
1127    m_camera_info.dummy_shot.dnr_bypass = 1;
1128    m_camera_info.dummy_shot.fd_bypass = 1;
1129
1130    /*sensor setting*/
1131    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
1132    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
1133    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
1134
1135    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
1136    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
1137
1138    /*request setting*/
1139    m_camera_info.dummy_shot.request_sensor = 1;
1140    m_camera_info.dummy_shot.request_scc = 0;
1141    m_camera_info.dummy_shot.request_scp = 0;
1142    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
1143    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[1] = 0;
1144    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[2] = 0;
1145    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[3] = 0;
1146
1147    m_camera_info.sensor.width = m_camera2->getSensorRawW();
1148    m_camera_info.sensor.height = m_camera2->getSensorRawH();
1149
1150    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
1151    m_camera_info.sensor.planes = 2;
1152    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1153    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1154    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
1155    m_camera_info.sensor.ionClient = m_ionCameraClient;
1156
1157    for(i = 0; i < m_camera_info.sensor.buffers; i++){
1158        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1159        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK: the driver currently expects 8*1024 here; this should come from a predefined constant
1161        allocCameraMemory(m_camera_info.sensor.ionClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1162    }
1163
1164    m_camera_info.isp.width = m_camera_info.sensor.width;
1165    m_camera_info.isp.height = m_camera_info.sensor.height;
1166    m_camera_info.isp.format = m_camera_info.sensor.format;
1167    m_camera_info.isp.planes = m_camera_info.sensor.planes;
1168    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1169    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1170    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
1171    m_camera_info.isp.ionClient = m_ionCameraClient;
1172
1173    for(i = 0; i < m_camera_info.isp.buffers; i++){
1174        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
1175        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
1176        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
1177        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
1178        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
1179        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
1180        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
    }
1182
1183    /* init ISP */
1184    ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
1185    if (ret < 0) {
1186        ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ",  __FUNCTION__, m_camera_info.sensor_id);
1187        return false;
1188    }
1189    cam_int_s_fmt(&(m_camera_info.isp));
1190    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
1191    cam_int_reqbufs(&(m_camera_info.isp));
1192    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
1193    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
1194
1195    /* init Sensor */
1196    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
1197    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
1198    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
1199        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
1200    }
1201    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
1202    cam_int_reqbufs(&(m_camera_info.sensor));
1203    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
1204    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1205        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1206        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1207                sizeof(struct camera2_shot_ext));
1208        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1209        m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1210        cam_int_qbuf(&(m_camera_info.sensor), i);
1211    }
1212    ALOGV("== stream_on :: .sensor");
1213    cam_int_streamon(&(m_camera_info.sensor));
1214
1215    /* init Capture */
1216    m_camera_info.capture.width = m_camera2->getSensorW();
1217    m_camera_info.capture.height = m_camera2->getSensorH();
1218    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
1219#ifdef ENABLE_FRAME_SYNC
1220    m_camera_info.capture.planes = 2;
1221#else
1222    m_camera_info.capture.planes = 1;
1223#endif
1224    m_camera_info.capture.buffers = 8;
1225    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1226    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
1227    m_camera_info.capture.ionClient = m_ionCameraClient;
1228
1229    for(i = 0; i < m_camera_info.capture.buffers; i++){
1230        initCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1231        m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
1232#ifdef ENABLE_FRAME_SYNC
        m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK: the driver currently expects 4*1024 here; this should come from a predefined constant
1234#endif
1235        allocCameraMemory(m_camera_info.capture.ionClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1236    }
1237
1238    cam_int_s_input(&(m_camera_info.capture), m_camera_info.sensor_id);
1239    cam_int_s_fmt(&(m_camera_info.capture));
1240    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1241    cam_int_reqbufs(&(m_camera_info.capture));
1242    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1243
1244    for (i = 0; i < m_camera_info.capture.buffers; i++) {
1245        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1246        cam_int_qbuf(&(m_camera_info.capture), i);
1247    }
1248
1249    ALOGV("== stream_on :: capture");
1250    if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1251        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1252    } else {
1253        m_camera_info.capture.status = true;
1254    }
1255
1256    return true;
1257}
1258
1259void ExynosCameraHWInterface2::StartISP()
1260{
1261    ALOGV("== stream_on :: isp");
1262    cam_int_streamon(&(m_camera_info.isp));
1263    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
1264}
1265
1266int ExynosCameraHWInterface2::getCameraId() const
1267{
1268    return m_cameraId;
1269}
1270
1271int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
1272{
1273    ALOGV("DEBUG(%s):", __FUNCTION__);
1274    if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
1275            && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
1276        m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
1277        return 0;
1278    }
1279    else {
1280        ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
1281        return 1;
1282    }
1283}
1284
1285int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
1286{
1287    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
1288    if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
1289        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
1290        return 0;
1291    }
1292    m_isRequestQueueNull = false;
1293    if (m_requestManager->GetNumEntries() == 0)
1294        m_requestManager->SetInitialSkip(5);
1295    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1296    return 0;
1297}
1298
1299int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
1300{
1301    ALOGV("DEBUG(%s):", __FUNCTION__);
1302    if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
1303            && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
1304        m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
1305        return 0;
1306    }
1307    else {
1308        ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
1309        return 1;
1310    }
1311}
1312
1313int ExynosCameraHWInterface2::getInProgressCount()
1314{
1315    int inProgressCount = m_requestManager->GetNumEntries();
1316    ALOGV("DEBUG(%s): # of dequeued req (%d)", __FUNCTION__, inProgressCount);
1317    return inProgressCount;
1318}
1319
1320int ExynosCameraHWInterface2::flushCapturesInProgress()
1321{
1322    return 0;
1323}
1324
1325int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
1326{
1327    ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
1328
1329    if (request == NULL) return BAD_VALUE;
1330    if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
1331        return BAD_VALUE;
1332    }
1333    status_t res;
1334    // Pass 1, calculate size and allocate
1335    res = m_camera2->constructDefaultRequest(request_template,
1336            request,
1337            true);
1338    if (res != OK) {
1339        return res;
1340    }
1341    // Pass 2, build request
1342    res = m_camera2->constructDefaultRequest(request_template,
1343            request,
1344            false);
1345    if (res != OK) {
1346        ALOGE("Unable to populate new request for template %d",
1347                request_template);
1348    }
1349
1350    return res;
1351}
1352
1353int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
1354                                    uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
1355{
1356    ALOGV("DEBUG(%s): allocate stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
1357    char node_name[30];
1358    int fd = 0, allocCase = 0;
1359    StreamThread *AllocatedStream;
1360    stream_parameters_t newParameters;
1361
1362    if (format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE &&
1363        m_camera2->isSupportedResolution(width, height)) {
1364        if (!(m_streamThreads[0].get())) {
1365            ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
1366            allocCase = 0;
1367        }
1368        else {
1369            if ((m_streamThreads[0].get())->m_activated == true) {
1370                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1371                allocCase = 1;
1372            }
1373            else {
1374                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1375                allocCase = 2;
1376            }
1377        }
1378        if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)) {
1379            m_wideAspect = true;
1380        }
1381        else {
1382            m_wideAspect = false;
1383        }
1384        ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
1385
1386        if (allocCase == 0 || allocCase == 2) {
1387            *stream_id = 0;
1388
1389            if (allocCase == 0) {
1390                m_streamThreads[0]  = new StreamThread(this, *stream_id);
1391             }
1392            AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
1393            m_scp_flushing = false;
1394            m_scp_closing = false;
1395            m_scp_closed = false;
1396            usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()
1397
1398            *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1399            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1400            *max_buffers = 8;
1401
1402            newParameters.streamType    = STREAM_TYPE_DIRECT;
1403            newParameters.outputWidth   = width;
1404            newParameters.outputHeight  = height;
1405            newParameters.nodeWidth     = width;
1406            newParameters.nodeHeight    = height;
1407            newParameters.outputFormat  = *format_actual;
1408            newParameters.nodeFormat    = HAL_PIXEL_FORMAT_2_V4L2_PIX(*format_actual);
1409            newParameters.streamOps     = stream_ops;
1410            newParameters.usage         = *usage;
1411            newParameters.numHwBuffers  = 8;
1412            newParameters.numOwnSvcBuffers = *max_buffers;
1413            newParameters.fd            = m_fd_scp;
1414            newParameters.nodePlanes    = NUM_PLANES(*format_actual);
1415            newParameters.svcPlanes     = NUM_PLANES(*format_actual);
1416            newParameters.metaPlanes     = 1;
1417            newParameters.halBuftype    = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1418            newParameters.memory        = V4L2_MEMORY_DMABUF;
1419            newParameters.ionClient     = m_ionCameraClient;
1420            newParameters.numSvcBufsInHal  = 0;
1421            AllocatedStream->m_index = *stream_id;
1422            AllocatedStream->setParameter(&newParameters);
1423            AllocatedStream->m_activated = true;
1424
1425            m_scp_flushing = false;
1426            m_scp_closing = false;
1427            m_scp_closed = false;
1428            m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
1429            m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
1430            return 0;
1431        }
1432        else if (allocCase == 1) {
1433            record_parameters_t recordParameters;
1434            StreamThread *parentStream;
1435            parentStream = (StreamThread*)(m_streamThreads[0].get());
1436            if (!parentStream) {
1437                return 1;
1438                // TODO
1439            }
1440            *stream_id = 2;
1441            usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()
1442
1443            *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
1444            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1445            *max_buffers = 6;
1446
1447            recordParameters.outputWidth   = width;
1448            recordParameters.outputHeight  = height;
1449            recordParameters.outputFormat     = *format_actual;
1450            recordParameters.svcPlanes        = NUM_PLANES(*format_actual);
1451            recordParameters.streamOps     = stream_ops;
1452            recordParameters.usage         = *usage;
1453            recordParameters.numOwnSvcBuffers = *max_buffers;
1454            recordParameters.numSvcBufsInHal  = 0;
1455
1456            parentStream->setRecordingParameter(&recordParameters);
1457            m_scp_flushing = false;
1458            m_scp_closing = false;
1459            m_scp_closed = false;
1460            m_recordingEnabled = true;
1461            return 0;
1462        }
1463    }
1464    else if (format == HAL_PIXEL_FORMAT_BLOB
1465            && m_camera2->isSupportedJpegResolution(width, height)) {
1466
1467        *stream_id = 1;
1468
1469        if (!(m_streamThreads[*stream_id].get())) {
1470            ALOGV("DEBUG(%s): stream 1 not exist", __FUNCTION__);
1471            m_streamThreads[1]  = new StreamThread(this, *stream_id);
1472            allocCase = 0;
1473        }
1474        else {
1475            if ((m_streamThreads[*stream_id].get())->m_activated == true) {
1476                ALOGV("DEBUG(%s): stream 1 exists and activated.", __FUNCTION__);
1477                allocCase = 1;
1478            }
1479            else {
1480                ALOGV("DEBUG(%s): stream 1 exists and deactivated.", __FUNCTION__);
1481                allocCase = 2;
1482            }
1483        }
1484
1485        AllocatedStream = (StreamThread*)(m_streamThreads[*stream_id].get());
1486
1487        fd = m_camera_info.capture.fd;
1488        usleep(100000); // TODO : guarantee the code below runs after readyToRunInternal()
1489
1490        *format_actual = HAL_PIXEL_FORMAT_BLOB;
1491
1492        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1493        *max_buffers = 4;
1494
1495        newParameters.streamType    = STREAM_TYPE_INDIRECT;
1496        newParameters.outputWidth   = width;
1497        newParameters.outputHeight  = height;
1498
1499        newParameters.nodeWidth     = m_camera2->getSensorW();
1500        newParameters.nodeHeight    = m_camera2->getSensorH();
1501
1502        newParameters.outputFormat  = *format_actual;
1503        newParameters.nodeFormat    = V4L2_PIX_FMT_YUYV;
1504        newParameters.streamOps     = stream_ops;
1505        newParameters.usage         = *usage;
1506        newParameters.numHwBuffers  = 8;
1507        newParameters.numOwnSvcBuffers = *max_buffers;
1508        newParameters.fd            = fd;
1509        newParameters.nodePlanes    = 1;
1510        newParameters.svcPlanes     = 1;
1511        newParameters.halBuftype    = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1512        newParameters.memory        = V4L2_MEMORY_DMABUF;
1513        newParameters.ionClient     = m_ionCameraClient;
1514        newParameters.numSvcBufsInHal  = 0;
1515        AllocatedStream->m_index = *stream_id;
1516        AllocatedStream->setParameter(&newParameters);
1517        return 0;
1518    }
1519    else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
1520        StreamThread *parentStream;
1521        callback_parameters_t callbackParameters;
1522        parentStream = (StreamThread*)(m_streamThreads[0].get());
1523        if (!parentStream) {
1524            ALOGE("(%s): preview stream does not exist", __FUNCTION__);
1525            return 1;
1526        }
1527        *stream_id = 3;
1528
1529        *format_actual = format;
1530        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1531        *max_buffers = 4;
1532        if (width == parentStream->m_parameters.outputWidth
1533                && height == parentStream->m_parameters.outputHeight) {
1534
1535            callbackParameters.outputWidth   = width;
1536            callbackParameters.outputHeight  = height;
1537            callbackParameters.outputFormat  = *format_actual;
1538            callbackParameters.svcPlanes     = NUM_PLANES(*format_actual);
1539            callbackParameters.streamOps     = stream_ops;
1540            callbackParameters.usage         = *usage;
1541            callbackParameters.numOwnSvcBuffers = *max_buffers;
1542            callbackParameters.numSvcBufsInHal  = 0;
1543            if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
1544                callbackParameters.internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
1545                callbackParameters.internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
1546            }
1547            else {
1548                callbackParameters.internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1549                callbackParameters.internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
1550            }
1551
1552            parentStream->setCallbackParameter(&callbackParameters);
1553            m_previewCbEnabled = true;
1554            ALOGV("(%s): Enabling Previewcb - planes(%d)", __FUNCTION__, callbackParameters.svcPlanes);
1555        }
1556        return 0;
1557    }
1558    ALOGE("ERR(%s): unsupported pixel format", __FUNCTION__);
1559    return 1; // TODO : check proper error code
1560}
1561
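/*
 * Registers the service-allocated gralloc buffers for a stream. The preview
 * stream (0) queues its buffers directly to the SCP output node; the JPEG
 * stream (1) keeps using the capture node's own buffers and only records the
 * gralloc handles; the recording (2) and preview-callback (3) substreams just
 * lock the buffers and store their fds and mapped addresses.
 */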
1562int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
1563        int num_buffers, buffer_handle_t *registeringBuffers)
1564{
1565    int                     i,j;
1566    void                    *virtAddr[3];
1567    uint32_t                plane_index = 0;
1568    stream_parameters_t     *targetStreamParms;
1569    record_parameters_t     *targetRecordParms;
1570    callback_parameters_t   *targetCallbackParms;
1571    node_info_t             *currentNode;
1572
1573    struct v4l2_buffer v4l2_buf;
1574    struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1575
1576    ALOGV("DEBUG(%s): streamID (%d), num_buff(%d), handle(%x) ", __FUNCTION__,
1577        stream_id, num_buffers, (uint32_t)registeringBuffers);
1578
1579    if (stream_id == 0) {
1580        targetStreamParms = &(m_streamThreads[0]->m_parameters);
1581    }
1582    else if (stream_id == 1) {
1583        targetStreamParms = &(m_streamThreads[1]->m_parameters);
1584        // TODO : clarify the stream-off handling for this case
1585        m_need_streamoff = 0;
1586
1587        if (m_camera_info.capture.status == false) {
1588            /* capture */
1589            m_camera_info.capture.buffers = 8;
1590            cam_int_s_fmt(&(m_camera_info.capture));
1591            cam_int_reqbufs(&(m_camera_info.capture));
1592            for (i = 0; i < m_camera_info.capture.buffers; i++) {
1593                ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1594                cam_int_qbuf(&(m_camera_info.capture), i);
1595            }
1596
1597            if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1598                ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1599            } else {
1600                m_camera_info.capture.status = true;
1601            }
1602        }
1603    }
1604    else if (stream_id == 2) {
1605        m_need_streamoff = 0;
1606        targetRecordParms = &(m_streamThreads[0]->m_recordParameters);
1607
1608        targetRecordParms->numSvcBuffers = num_buffers;
1609
1610        for (i = 0 ; i<targetRecordParms->numSvcBuffers ; i++) {
1611            ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
1612                i, (uint32_t)(registeringBuffers[i]));
1613            if (m_grallocHal) {
1614                if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
1615                       targetRecordParms->usage, 0, 0,
1616                       targetRecordParms->outputWidth, targetRecordParms->outputHeight, virtAddr) != 0) {
1617                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
1618                }
1619                else {
1620                    ExynosBuffer currentBuf;
1621                    const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1622                    currentBuf.fd.extFd[0] = priv_handle->fd;
1623                    currentBuf.fd.extFd[1] = priv_handle->fd1;
1624                    currentBuf.fd.extFd[2] = priv_handle->fd2;
1625                    for (plane_index=0 ; plane_index < targetRecordParms->svcPlanes ; plane_index++) {
1626                        currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
1627                        ALOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x)",
1628                             __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
1629                             (unsigned int)currentBuf.virt.extP[plane_index]);
1630                    }
1631                    targetRecordParms->svcBufStatus[i]  = ON_SERVICE;
1632                    targetRecordParms->svcBuffers[i]    = currentBuf;
1633                    targetRecordParms->svcBufHandle[i]  = registeringBuffers[i];
1634                }
1635            }
1636        }
1637        m_needsRecordBufferInit = true;
1638        return 0;
1639    }
1640    else if (stream_id == 3) {
1641        targetCallbackParms = &(m_streamThreads[0]->m_previewCbParameters);
1642
1643        targetCallbackParms->numSvcBuffers = num_buffers;
1644
1645        for (i = 0 ; i < targetCallbackParms->numSvcBuffers ; i++) {
1646            ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
1647                i, (uint32_t)(registeringBuffers[i]));
1648            if (m_grallocHal) {
1649                if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
1650                       targetCallbackParms->usage, 0, 0,
1651                       targetCallbackParms->outputWidth, targetCallbackParms->outputHeight, virtAddr) != 0) {
1652                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
1653                }
1654                else {
1655                    ExynosBuffer currentBuf;
1656                    const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1657                    currentBuf.fd.extFd[0] = priv_handle->fd;
1658                    currentBuf.fd.extFd[1] = priv_handle->fd1;
1659                    currentBuf.fd.extFd[2] = priv_handle->fd2;
1660                    for (plane_index = 0 ; plane_index < targetCallbackParms->svcPlanes ; plane_index++) {
1661                        currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
1662                    }
1663                    ALOGV("fd(%d) addr(%x) fd1(%d) fd2(%d)", priv_handle->fd, (unsigned int)currentBuf.virt.extP[0],
1664                        priv_handle->fd1, priv_handle->fd2);
1665                    ALOGV("flags(%d) size(%d) offset(%d) stride(%d) vstride(%d)",
1666                        priv_handle->flags, priv_handle->size, priv_handle->offset,
1667                        priv_handle->stride, priv_handle->vstride);
1668                    targetCallbackParms->svcBufStatus[i]  = ON_SERVICE;
1669                    targetCallbackParms->svcBuffers[i]    = currentBuf;
1670                    targetCallbackParms->svcBufHandle[i]  = registeringBuffers[i];
1671                }
1672            }
1673        }
1674        m_needsPreviewCbBufferInit = true;
1675        return 0;
1676    }
1677    else {
1678        ALOGE("ERR(%s) unregistered stream id (%d)", __FUNCTION__, stream_id);
1679        return 1;
1680    }
1681
1682    if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
1683        if (num_buffers < targetStreamParms->numHwBuffers) {
1684            ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
1685                __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
1686            return 1;
1687        }
1688    }
1689    ALOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)",
1690            __FUNCTION__, targetStreamParms->outputFormat, targetStreamParms->outputWidth,
1691            targetStreamParms->outputHeight, targetStreamParms->svcPlanes);
1692
1693    targetStreamParms->numSvcBuffers = num_buffers;
1694    currentNode = &(targetStreamParms->node); // TODO: remove
1695
1696    currentNode->fd         = targetStreamParms->fd;
1697    currentNode->width      = targetStreamParms->nodeWidth;
1698    currentNode->height     = targetStreamParms->nodeHeight;
1699    currentNode->format     = targetStreamParms->nodeFormat;
1700    currentNode->planes     = targetStreamParms->nodePlanes;
1701    currentNode->buffers    = targetStreamParms->numHwBuffers;
1702    currentNode->type       = targetStreamParms->halBuftype;
1703    currentNode->memory     = targetStreamParms->memory;
1704    currentNode->ionClient  = targetStreamParms->ionClient;
1705
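    /* For direct (preview) streams: if a previous session left the pipeline
     * running (m_need_streamoff == 1), stop the capture node and release its
     * buffers, then re-request and re-queue buffers for the isp, sensor and
     * capture nodes before configuring this stream's output node. */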
1706    if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
1707        if (m_need_streamoff == 1) {
1708            if (m_sensorThread != NULL) {
1709                m_sensorThread->release();
1710                /* TODO */
1711                usleep(500000);
1712            } else {
1713                ALOGE("+++++++ sensor thread is NULL %d", __LINE__);
1714            }
1715
1716            ALOGV("(%s): calling capture streamoff", __FUNCTION__);
1717            if (cam_int_streamoff(&(m_camera_info.capture)) < 0) {
1718                ALOGE("ERR(%s): capture stream off fail", __FUNCTION__);
1719            } else {
1720                m_camera_info.capture.status = false;
1721            }
1722
1723            ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
1724
1725            m_camera_info.capture.buffers = 0;
1726            ALOGV("DEBUG(%s): capture calling reqbuf 0 ", __FUNCTION__);
1727            cam_int_reqbufs(&(m_camera_info.capture));
1728            ALOGV("DEBUG(%s): capture calling reqbuf 0 done", __FUNCTION__);
1729
1730            m_isIspStarted = false;
1731        }
1732
1733        if (m_need_streamoff == 1) {
1734            m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1735            m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1736            m_camera_info.capture.buffers = 8;
1737            /* isp */
1738            cam_int_s_fmt(&(m_camera_info.isp));
1739            cam_int_reqbufs(&(m_camera_info.isp));
1740            /* sensor */
1741            cam_int_s_fmt(&(m_camera_info.sensor));
1742            cam_int_reqbufs(&(m_camera_info.sensor));
1743
1744            for (i = 0; i < 8; i++) {
1745                ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1746                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1747                        sizeof(struct camera2_shot_ext));
1748                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1749                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1750                cam_int_qbuf(&(m_camera_info.sensor), i);
1751            }
1752
1753            /* capture */
1754            cam_int_s_fmt(&(m_camera_info.capture));
1755            cam_int_reqbufs(&(m_camera_info.capture));
1756            for (i = 0; i < m_camera_info.capture.buffers; i++) {
1757                ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1758                cam_int_qbuf(&(m_camera_info.capture), i);
1759            }
1760
1761        }
1762
1763        cam_int_s_input(currentNode, m_camera_info.sensor_id);
1764        cam_int_s_fmt(currentNode);
1765        cam_int_reqbufs(currentNode);
1766
1767    }
1768    else if (targetStreamParms->streamType == STREAM_TYPE_INDIRECT) {
1769        for (i = 0; i < currentNode->buffers; i++) {
1770            memcpy(&(currentNode->buffer[i]), &(m_camera_info.capture.buffer[i]), sizeof(ExynosBuffer));
1771        }
1772    }
1773
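    /* Lock every gralloc buffer to get its dma-buf fds and mapped addresses,
     * compute the per-plane sizes, and for direct streams pre-queue the first
     * numHwBuffers of them to the V4L2 node (adding a metadata plane when
     * ENABLE_FRAME_SYNC is defined); the remaining buffers stay ON_SERVICE. */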
1774    for (i = 0 ; i<targetStreamParms->numSvcBuffers ; i++) {
1775        ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
1776            i, (uint32_t)(registeringBuffers[i]));
1777        if (m_grallocHal) {
1778            if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
1779                   targetStreamParms->usage, 0, 0,
1780                   currentNode->width, currentNode->height, virtAddr) != 0) {
1781                ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
1782            }
1783            else {
1784                v4l2_buf.m.planes   = planes;
1785                v4l2_buf.type       = currentNode->type;
1786                v4l2_buf.memory     = currentNode->memory;
1787                v4l2_buf.index      = i;
1788                v4l2_buf.length     = currentNode->planes;
1789
1790                ExynosBuffer currentBuf;
1791                ExynosBuffer metaBuf;
1792                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1793
1794                m_getAlignedYUVSize(currentNode->format,
1795                    currentNode->width, currentNode->height, &currentBuf);
1796
1797                ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
1798                if (currentNode->planes == 1) {
1799                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
1800                    currentBuf.fd.extFd[0] = priv_handle->fd;
1801                    currentBuf.size.extS[0] = priv_handle->size;
1802                    currentBuf.size.extS[1] = 0;
1803                    currentBuf.size.extS[2] = 0;
1804                } else if (currentNode->planes == 2) {
1805                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
1806                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
1807                    currentBuf.fd.extFd[0] = priv_handle->fd;
1808                    currentBuf.fd.extFd[1] = priv_handle->fd1;
1809
1810                } else if (currentNode->planes == 3) {
1811                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
1812                    v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
1813                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
1814                    currentBuf.fd.extFd[0] = priv_handle->fd;
1815                    currentBuf.fd.extFd[2] = priv_handle->fd1;
1816                    currentBuf.fd.extFd[1] = priv_handle->fd2;
1817                }
1818                for (plane_index = 0 ; plane_index < v4l2_buf.length ; plane_index++) {
1819                    currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
1820                    v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
1821                    ALOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x), length(%d)",
1822                         __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
1823                         (unsigned int)currentBuf.virt.extP[plane_index],
1824                         v4l2_buf.m.planes[plane_index].length);
1825                }
1826
1827                if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
1828                    if (i < currentNode->buffers) {
1829
1830
1831#ifdef ENABLE_FRAME_SYNC
1832                        /* add plane for metadata*/
1833                        metaBuf.size.extS[0] = 4*1024;
1834                        allocCameraMemory(targetStreamParms->ionClient , &metaBuf, 1);
1835
1836                        v4l2_buf.length += targetStreamParms->metaPlanes;
1837                        v4l2_buf.m.planes[3].m.fd = metaBuf.fd.extFd[0];
1838                        v4l2_buf.m.planes[3].length = metaBuf.size.extS[0];
1839
1840                        ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
1841#endif
1842                        if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
1843                            ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
1844                                __FUNCTION__, stream_id, currentNode->fd);
1845                            //return false;
1846                        }
1847                        ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
1848                                __FUNCTION__, stream_id, currentNode->fd);
1849                        targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
1850                    }
1851                    else {
1852                        targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1853                    }
1854                }
1855                else if (targetStreamParms->streamType == STREAM_TYPE_INDIRECT) {
1856                    targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1857                }
1858                targetStreamParms->svcBuffers[i]       = currentBuf;
1859                targetStreamParms->metaBuffers[i] = metaBuf;
1860                targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
1861            }
1862        }
1863    }
1864
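    /* Start streaming on the output node, restart the capture and sensor nodes
     * if they were stopped above, start the ISP if needed, and re-arm the
     * sensor and main threads. */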
1865    ALOGV("DEBUG(%s): calling  streamon", __FUNCTION__);
1866    if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
1867        ALOGD("%s(%d), stream id = %d", __FUNCTION__, __LINE__, stream_id);
1868        cam_int_streamon(&(targetStreamParms->node));
1869    }
1870
1871    if (m_need_streamoff == 1) {
1872        if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1873            ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1874        } else {
1875            m_camera_info.capture.status = true;
1876        }
1877
1878        cam_int_streamon(&(m_camera_info.sensor));
1879    }
1880
1881    ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
1882    ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
1883
1884    if(!m_isIspStarted) {
1885        m_isIspStarted = true;
1886        StartISP();
1887    }
1888
1889    if (m_need_streamoff == 1) {
1890        m_requestManager->SetInitialSkip(8);
1891        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1892        m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1893    }
1894    m_need_streamoff = 1;
1895
1896    return 0;
1897}
1898
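/*
 * Tears down a stream. Streams 0 and 1 stop their StreamThread; the recording
 * (2) and preview-callback (3) substreams are only flagged for
 * re-initialization since they share the preview StreamThread.
 */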
1899int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
1900{
1901    StreamThread *targetStream;
1902    ALOGV("DEBUG(%s):", __FUNCTION__);
1903
1904    if (stream_id == 0) {
1905        targetStream = (StreamThread*)(m_streamThreads[0].get());
1906        m_scp_flushing = true;
1907    }
1908    else if (stream_id == 1) {
1909        targetStream = (StreamThread*)(m_streamThreads[1].get());
1910    }
1911    else if (stream_id == 2 && m_recordingEnabled) {
1912        m_recordingEnabled = false;
1913        m_needsRecordBufferInit = true;
1914        return 0;
1915    }
1916    else if (stream_id == 3 && m_previewCbEnabled) {
1917        m_previewCbEnabled = false;
1918        m_needsPreviewCbBufferInit = true;
1919        return 0;
1920    }
1921    else {
1922        ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
1923        return 1;
1924    }
1925
1926    targetStream->m_releasing = true;
1927    do {
1928        ALOGD("stream thread release %d", __LINE__);
1929        targetStream->release();
1930        usleep(33000);
1931    } while (targetStream->m_releasing);
1932    targetStream->m_activated = false;
1933    ALOGV("DEBUG(%s): DONE", __FUNCTION__);
1934    return 0;
1935}
1936
1937int ExynosCameraHWInterface2::allocateReprocessStream(
1938    uint32_t width, uint32_t height, uint32_t format,
1939    const camera2_stream_in_ops_t *reprocess_stream_ops,
1940    uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
1941{
1942    ALOGV("DEBUG(%s):", __FUNCTION__);
1943    return 0;
1944}
1945
1946int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
1947{
1948    ALOGV("DEBUG(%s):", __FUNCTION__);
1949    return 0;
1950}
1951
1952int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
1953{
1954    ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
1955
1956    switch (trigger_id) {
1957    case CAMERA2_TRIGGER_AUTOFOCUS:
1958        ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
1959        OnAfTriggerStart(ext1);
1960        break;
1961
1962    case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
1963        ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
1964        OnAfCancel(ext1);
1965        break;
1966    default:
1967        break;
1968    }
1969    return 0;
1970}
1971
1972int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
1973{
1974    ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
1975    m_notifyCb = notify_cb;
1976    m_callbackCookie = user;
1977    return 0;
1978}
1979
1980int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
1981{
1982    ALOGV("DEBUG(%s):", __FUNCTION__);
1983    return 0;
1984}
1985
1986int ExynosCameraHWInterface2::dump(int fd)
1987{
1988    ALOGV("DEBUG(%s):", __FUNCTION__);
1989    return 0;
1990}
1991
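/*
 * Fills in the per-plane buffer sizes (with the alignment the hardware
 * requires) for the given V4L2 color format.
 */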
1992void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
1993{
1994    switch (colorFormat) {
1995    // 1p
1996    case V4L2_PIX_FMT_RGB565 :
1997    case V4L2_PIX_FMT_YUYV :
1998    case V4L2_PIX_FMT_UYVY :
1999    case V4L2_PIX_FMT_VYUY :
2000    case V4L2_PIX_FMT_YVYU :
2001        buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
2002        buf->size.extS[1] = 0;
2003        buf->size.extS[2] = 0;
2004        break;
2005    // 2p
2006    case V4L2_PIX_FMT_NV12 :
2007    case V4L2_PIX_FMT_NV12T :
2008    case V4L2_PIX_FMT_NV21 :
2009        buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
2010        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
2011        buf->size.extS[2] = 0;
2012        break;
2013    case V4L2_PIX_FMT_NV12M :
2014    case V4L2_PIX_FMT_NV12MT_16X16 :
2015    case V4L2_PIX_FMT_NV21M:
2016        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
2017        buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
2018        buf->size.extS[2] = 0;
2019        break;
2020    case V4L2_PIX_FMT_NV16 :
2021    case V4L2_PIX_FMT_NV61 :
2022        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2023        buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
2024        buf->size.extS[2] = 0;
2025        break;
2026     // 3p
2027    case V4L2_PIX_FMT_YUV420 :
2028    case V4L2_PIX_FMT_YVU420 :
2029        buf->size.extS[0] = (w * h);
2030        buf->size.extS[1] = (w * h) >> 2;
2031        buf->size.extS[2] = (w * h) >> 2;
2032        break;
2033    case V4L2_PIX_FMT_YUV420M:
2034    case V4L2_PIX_FMT_YVU420M :
2035    case V4L2_PIX_FMT_YUV422P :
2036        buf->size.extS[0] = ALIGN(w,  32) * ALIGN(h,  16);
2037        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2038        buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2039        break;
2040    default:
2041        ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
2042        return;
2044    }
2045}
2046
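/*
 * Computes a centered crop of the source that matches the destination aspect
 * ratio, shrinks it further for digital zoom, and rounds the crop offsets and
 * sizes to even values.
 */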
2047bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
2048                                             int  dst_w,  int   dst_h,
2049                                             int *crop_x, int *crop_y,
2050                                             int *crop_w, int *crop_h,
2051                                             int zoom)
2052{
2053    *crop_w = src_w;
2054    *crop_h = src_h;
2055
2056    if (   src_w != dst_w
2057        || src_h != dst_h) {
2058        float src_ratio = 1.0f;
2059        float dst_ratio = 1.0f;
2060
2061        // ex : 1024 / 768
2062        src_ratio = (float)src_w / (float)src_h;
2063
2064        // ex : 352  / 288
2065        dst_ratio = (float)dst_w / (float)dst_h;
2066
2067        if (dst_w * dst_h < src_w * src_h) {
2068            if (dst_ratio <= src_ratio) {
2069                // shrink w
2070                *crop_w = src_h * dst_ratio;
2071                *crop_h = src_h;
2072            } else {
2073                // shrink h
2074                *crop_w = src_w;
2075                *crop_h = src_w / dst_ratio;
2076            }
2077        } else {
2078            if (dst_ratio <= src_ratio) {
2079                // shrink w
2080                *crop_w = src_h * dst_ratio;
2081                *crop_h = src_h;
2082            } else {
2083                // shrink h
2084                *crop_w = src_w;
2085                *crop_h = src_w / dst_ratio;
2086            }
2087        }
2088    }
2089
2090    if (zoom != 0) {
2091        float zoomLevel = ((float)zoom + 10.0) / 10.0;
2092        *crop_w = (int)((float)*crop_w / zoomLevel);
2093        *crop_h = (int)((float)*crop_h / zoomLevel);
2094    }
2095
2096    #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
2097    unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
2098    if (w_align != 0) {
2099        if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
2100            && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
2101            *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
2102        }
2103        else
2104            *crop_w -= w_align;
2105    }
2106
2107    #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
2108    unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
2109    if (h_align != 0) {
2110        if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
2111            && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
2112            *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
2113        }
2114        else
2115            *crop_h -= h_align;
2116    }
2117
2118    *crop_x = (src_w - *crop_w) >> 1;
2119    *crop_y = (src_h - *crop_h) >> 1;
2120
2121    if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
2122        *crop_x -= 1;
2123
2124    if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
2125        *crop_y -= 1;
2126
2127    return true;
2128}
2129
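/*
 * BayerBufManager tracks NUM_BAYER_BUFFERS raw buffers as they circulate
 * between the HAL, the sensor node and the ISP node. Each entry moves through
 * BAYER_ON_HAL_EMPTY -> BAYER_ON_SENSOR -> BAYER_ON_HAL_FILLED -> BAYER_ON_ISP
 * and back, with a simple ring index per queue.
 */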
2130BayerBufManager::BayerBufManager()
2131{
2132    ALOGV("DEBUG(%s): ", __FUNCTION__);
2133    for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
2134        entries[i].status = BAYER_ON_HAL_EMPTY;
2135        entries[i].reqFrameCnt = 0;
2136    }
2137    sensorEnqueueHead = 0;
2138    sensorDequeueHead = 0;
2139    ispEnqueueHead = 0;
2140    ispDequeueHead = 0;
2141    numOnSensor = 0;
2142    numOnIsp = 0;
2143    numOnHalFilled = 0;
2144    numOnHalEmpty = NUM_BAYER_BUFFERS;
2145}
2146
2147BayerBufManager::~BayerBufManager()
2148{
2149    ALOGV("%s", __FUNCTION__);
2150}
2151
2152int     BayerBufManager::GetIndexForSensorEnqueue()
2153{
2154    int ret = 0;
2155    if (numOnHalEmpty == 0)
2156        ret = -1;
2157    else
2158        ret = sensorEnqueueHead;
2159    ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
2160    return ret;
2161}
2162
2163int    BayerBufManager::MarkSensorEnqueue(int index)
2164{
2165    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2166
2167    // sanity check
2168    if (index != sensorEnqueueHead) {
2169        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
2170        return -1;
2171    }
2172    if (entries[index].status != BAYER_ON_HAL_EMPTY) {
2173        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2174            index, entries[index].status, BAYER_ON_HAL_EMPTY);
2175        return -1;
2176    }
2177
2178    entries[index].status = BAYER_ON_SENSOR;
2179    entries[index].reqFrameCnt = 0;
2180    numOnHalEmpty--;
2181    numOnSensor++;
2182    sensorEnqueueHead = GetNextIndex(index);
2183    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2184        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2185    return 0;
2186}
2187
2188int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
2189{
2190    ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
2191
2192    if (entries[index].status != BAYER_ON_SENSOR) {
2193        ALOGE("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2194            index, entries[index].status, BAYER_ON_SENSOR);
2195        return -1;
2196    }
2197
2198    entries[index].status = BAYER_ON_HAL_FILLED;
2199    numOnHalFilled++;
2200    numOnSensor--;
2201
2202    return 0;
2203}
2204
2205int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
2206{
2207    int ret = 0;
2208    if (numOnHalFilled == 0)
2209        ret = -1;
2210    else {
2211        *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
2212        ret = ispEnqueueHead;
2213    }
2214    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2215    return ret;
2216}
2217
2218int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
2219{
2220    int ret = 0;
2221    if (numOnIsp == 0)
2222        ret = -1;
2223    else {
2224        *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
2225        ret = ispDequeueHead;
2226    }
2227    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2228    return ret;
2229}
2230
2231int    BayerBufManager::MarkIspEnqueue(int index)
2232{
2233    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2234
2235    // sanity check
2236    if (index != ispEnqueueHead) {
2237        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
2238        return -1;
2239    }
2240    if (entries[index].status != BAYER_ON_HAL_FILLED) {
2241        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2242            index, entries[index].status, BAYER_ON_HAL_FILLED);
2243        return -1;
2244    }
2245
2246    entries[index].status = BAYER_ON_ISP;
2247    numOnHalFilled--;
2248    numOnIsp++;
2249    ispEnqueueHead = GetNextIndex(index);
2250    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2251        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2252    return 0;
2253}
2254
2255int    BayerBufManager::MarkIspDequeue(int index)
2256{
2257    ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
2258
2259    // sanity check
2260    if (index != ispDequeueHead) {
2261        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
2262        return -1;
2263    }
2264    if (entries[index].status != BAYER_ON_ISP) {
2265        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2266            index, entries[index].status, BAYER_ON_ISP);
2267        return -1;
2268    }
2269
2270    entries[index].status = BAYER_ON_HAL_EMPTY;
2271    entries[index].reqFrameCnt = 0;
2272    numOnHalEmpty++;
2273    numOnIsp--;
2274    ispDequeueHead = GetNextIndex(index);
2275    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2276        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2277    return 0;
2278}
2279
2280int BayerBufManager::GetNumOnSensor()
2281{
2282    return numOnSensor;
2283}
2284
2285int BayerBufManager::GetNumOnHalFilled()
2286{
2287    return numOnHalFilled;
2288}
2289
2290int BayerBufManager::GetNumOnIsp()
2291{
2292    return numOnIsp;
2293}
2294
2295int     BayerBufManager::GetNextIndex(int index)
2296{
2297    index++;
2298    if (index >= NUM_BAYER_BUFFERS)
2299        index = 0;
2300
2301    return index;
2302}
2303
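/*
 * Main thread handler: dequeues requests from the service when signalled and
 * registers them with the request manager; on stream-output-done it prepares
 * the result metadata, frees the finished request and enqueues the output
 * frame back to the service.
 */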
2304void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
2305{
2306    camera_metadata_t *currentRequest = NULL;
2307    camera_metadata_t *currentFrame = NULL;
2308    size_t numEntries = 0;
2309    size_t frameSize = 0;
2310    camera_metadata_t * preparedFrame = NULL;
2311    camera_metadata_t *deregisteredRequest = NULL;
2312    uint32_t currentSignal = self->GetProcessingSignal();
2313    MainThread *  selfThread      = ((MainThread*)self);
2314    int res = 0;
2315
2316    int ret;
2317
2318    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
2319
2320    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2321        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2322
2323        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2324        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2325        return;
2326    }
2327
2328    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
2329        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
2330        if (m_requestManager->IsRequestQueueFull()==false) {
2331            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
2332            if (NULL == currentRequest) {
2333                ALOGE("ERR(%s)(0x%x): dequeue_request returned NULL ", __FUNCTION__, currentSignal);
2334                m_isRequestQueueNull = true;
2335            }
2336            else {
2337                m_requestManager->RegisterRequest(currentRequest);
2338
2339                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
2340                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
2341                if (m_requestManager->IsRequestQueueFull()==false)
2342                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
2343
2344                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2345            }
2346        }
2347        else {
2348            m_isRequestQueuePending = true;
2349        }
2350    }
2351
2352    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
2353        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
2354        /*while (1)*/ {
2355            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
2356            if (ret == false)
2357                ALOGD("++++++ PrepareFrame ret = %d", ret);
2358
2359            m_requestManager->DeregisterRequest(&deregisteredRequest);
2360
2361            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
2362            if (ret < 0)
2363                ALOGD("++++++ free_request ret = %d", ret);
2364
2365            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
2366            if (ret < 0)
2367                ALOGD("++++++ dequeue_frame ret = %d", ret);
2368
2369            if (currentFrame==NULL) {
2370                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
2371            }
2372            else {
2373                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
2374            }
2375            res = append_camera_metadata(currentFrame, preparedFrame);
2376            if (res==0) {
2377                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
2378                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
2379            }
2380            else {
2381                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
2382            }
2383        }
2384        if (!m_isRequestQueueNull) {
2385            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
2386        }
2387
2388        if (getInProgressCount()>0) {
2389            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
2390            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2391        }
2392    }
2393    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
2394    return;
2395}
2396
2397void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * self)
2398{
2399    ALOGV("DEBUG(%s): ", __FUNCTION__ );
2400    /* will add */
2401    return;
2402}
2403
2404
2405void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
2406{
2407    ALOGD("####  common Section");
2408    ALOGD("####                 magic(%x) ",
2409        shot_ext->shot.magicNumber);
2410    ALOGD("####  ctl Section");
2411    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
2412        shot_ext->shot.ctl.request.metadataMode,
2413        shot_ext->shot.ctl.lens.aperture,
2414        shot_ext->shot.ctl.sensor.exposureTime,
2415        shot_ext->shot.ctl.sensor.frameDuration,
2416        shot_ext->shot.ctl.sensor.sensitivity,
2417        shot_ext->shot.ctl.aa.awbMode);
2418
2419    ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) pv(%d) rec(%d) previewCb(%d)",
2420        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
2421        shot_ext->shot.ctl.request.outputStreams[0], shot_ext->shot.ctl.request.outputStreams[2],
2422        shot_ext->shot.ctl.request.outputStreams[3]);
2423
2424    ALOGD("####  DM Section");
2425    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
2426        shot_ext->shot.dm.request.metadataMode,
2427        shot_ext->shot.dm.lens.aperture,
2428        shot_ext->shot.dm.sensor.exposureTime,
2429        shot_ext->shot.dm.sensor.frameDuration,
2430        shot_ext->shot.dm.sensor.sensitivity,
2431        shot_ext->shot.dm.sensor.timeStamp,
2432        shot_ext->shot.dm.aa.awbMode,
2433        shot_ext->shot.dm.request.frameCount );
2434}
2435
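/*
 * Drives the AF-flash and capture-flash state machines on the control side:
 * patches the outgoing shot's aeflashMode / awbMode and masks request_scc
 * while a flash sequence is in progress.
 */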
2436void ExynosCameraHWInterface2::flashSetter(struct camera2_shot_ext * shot_ext)
2437{
2438    // 1. AF Flash
2439    if (m_afFlashEnableFlg) {
2440        switch (m_afFlashCnt) {
2441        case IS_FLASH_AF_ON:
2442            ALOGE("(%s): [AF Flash] IS_FLASH_AF_ON", __FUNCTION__);
2443            shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2444            m_afFlashCnt = IS_FLASH_AF_ON_START;
2445        break;
2446        case IF_FLASH_AF_OFF:
2447            ALOGE("(%s): [AF Flash] IS_FLASH_OFF and status clear", __FUNCTION__);
2448            shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2449            m_afFlashEnableFlg = false;
2450            m_afFlashCnt = 0;
2451        break;
2452        }
2453    }
2454
2455    // 2. Flash
2456    if (m_flashEnableFlg) {
2457        switch (m_flashCnt) {
2458        case IS_FLASH_ON:
2459            ALOGE("(%s): [Flash] Flash ON for Capture", __FUNCTION__);
2460            shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2461            shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_WB_AUTO;
2462            break;
2463        case IS_FLASH_ON_DONE:
2464            shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
2465            shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
2466            m_flashCnt = IS_FLASH_AUTO_AE_AWB_LOCK; // auto transition
2467            shot_ext->request_scc = 0;
2468            break;
2469        case IS_FLASH_AUTO_AE_AWB_LOCK:
2470            shot_ext->request_scc = 0;
2471            break;
2472        case IS_FLASH_AUTO_END:
2473            shot_ext->request_scc = 0;
2474            break;
2475        case IS_FLASH_AUTO_AE_AWB_LOCKED_AUTO_END:
2476            m_flashCnt = IS_FLASH_CAPTURE;
2477            shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
2478            shot_ext->request_scc = 1;
2479            break;
2480        case IS_FLASH_CAPTURE:
2481            shot_ext->request_scc = 0;
2482            break;
2483        case IS_FLASH_CAPTURE_JPEG:
2484            shot_ext->request_scc = 0;
2485            m_flashCaptured = true;
2486            m_flashEnableFlg = false;
2487            m_flashCnt = 0;
2488            break;
2489        }
2490    }
2491}
2492
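/*
 * Advances the flash state machines from the dynamic metadata returned by the
 * ISP, e.g. waiting (with a timeout) until dm.flash.flashMode reports that the
 * torch is actually on before allowing the AF trigger or the flash capture to
 * proceed.
 */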
2493void ExynosCameraHWInterface2::flashListener(struct camera2_shot_ext * shot_ext)
2494{
2495    if (m_flashCaptured) {
2496        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2497        shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_WB_AUTO;
2498        ALOGE("(%s): [Flash] Flash off after Capture", __FUNCTION__);
2499        m_flashCaptured = false;
2500    }
2501
2502    // 1. AF Flash
2503    if (m_afFlashEnableFlg) {
2504        ALOGV("(%s), [AF Flash] aa.aeflashMode : (ctl,dm) - (%d , %d)", __FUNCTION__, shot_ext->shot.ctl.aa.aeflashMode, shot_ext->shot.dm.aa.aeflashMode);
2505        ALOGV("(%s), [AF Flash] flash.flashMode : (ctl,dm) - (%d , %d)", __FUNCTION__, shot_ext->shot.ctl.flash.flashMode, shot_ext->shot.dm.flash.flashMode);
2506
2507        switch (m_afFlashCnt) {
2508        case IS_FLASH_AF_ON_START:
2509            if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_TORCH) {
2510                m_afFlashCnt = IS_FLASH_AF_ON_DONE;
2511                ALOGV("(%s): [AF Flash] Lis : dm.aa.aeflashMode == AA_FLASHMODE_ON", __FUNCTION__);
2512                m_IsAfTriggerRequired = true;
2513            } else {
2514                if (m_flashTimeOut == 0)
2515                    m_afFlashCnt = IS_FLASH_AF_ON_DONE;
2516                else
2517                    m_flashTimeOut--;
2518            }
2519            break;
2520        case IF_FLASH_AF_OFF:
2521            ALOGD("(%s): [AF Flash] Lis :  IS_FLASH_OFF : status clear", __FUNCTION__);
2522            break;
2523        }
2524    }
2525
2526    // 2. Flash
2527    if (m_flashEnableFlg) {
2528        switch (m_flashCnt) {
2529        case IS_FLASH_ON:
2530            if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_TORCH) {
2531                m_flashCnt = IS_FLASH_ON_DONE;
2532            } else {
2533                ALOGV("(%s): [Flash] Waiting : dm.flash.flashMode == CAM2_FLASH_MODE_TORCH", __FUNCTION__);
2534                m_flashTimeOut--;
2535                if (m_flashTimeOut == 0)
2536                    m_flashCnt = IS_FLASH_CAPTURE_JPEG;
2537            }
2538            break;
2539        case IS_FLASH_AUTO_AE_AWB_LOCK:
2540            if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_TORCH) {
2541                m_flashCnt = IS_FLASH_AUTO_END;
2542            } else {
2543                ALOGV("(%s):  [Flash] Waiting : dm.flash.flashMode== AA_FLASHMODE_AUTO", __FUNCTION__);
2544            }
2545            break;
2546        case IS_FLASH_AUTO_END:
2547            if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) {
2548                m_flashCnt = IS_FLASH_AUTO_AE_AWB_LOCKED_AUTO_END;
2549            } else {
2550                ALOGV("(%s):  [Flash] Waiting : dm.aa.awbMode == AA_AWBMODE_LOCKED and dm.flash.flashMode== CAM2_FLASH_MODE_OFF", __FUNCTION__);
2551            }
2552            break;
2553        case IS_FLASH_CAPTURE:
2554            if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_SINGLE) {
2555                m_flashCnt = IS_FLASH_CAPTURE_JPEG;
2556            } else {
2557                ALOGV("(%s):  [Flash] Waiting : ctl.aa.aeflashMode == AA_FLASHMODE_CAPTURE", __FUNCTION__);
2558            }
2559            break;
2560        }
2561    }
2562
2563}
2564
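/*
 * Sensor thread handler: on SIGNAL_THREAD_RELEASE it streams off and frees the
 * sensor and ISP nodes; on SIGNAL_SENSOR_START_REQ_PROCESSING it dequeues a
 * sensor buffer, patches the embedded camera2_shot_ext (zoom crop, AF mode and
 * trigger, flash and night-capture handling), queues it to the ISP and then
 * dequeues the processed shot.
 */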
2565void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
2566{
2567    uint32_t        currentSignal = self->GetProcessingSignal();
2568    SensorThread *  selfThread      = ((SensorThread*)self);
2569    int index;
2570    int index_isp;
2571    status_t res;
2572    nsecs_t frameTime;
2573    int bayersOnSensor = 0, bayersOnIsp = 0;
2574    int j = 0;
2575    bool isCapture = false;
2576    ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
2577
2578    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2579        ALOGV("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2580
2581        ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
2582        cam_int_streamoff(&(m_camera_info.sensor));
2583        ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
2584
2585        m_camera_info.sensor.buffers = 0;
2586        ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
2587        cam_int_reqbufs(&(m_camera_info.sensor));
2588        ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
2589
2590        ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
2591        isp_int_streamoff(&(m_camera_info.isp));
2592        ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
2593
2594        m_camera_info.isp.buffers = 0;
2595        ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
2596        cam_int_reqbufs(&(m_camera_info.isp));
2597        ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
2598
2599        exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
2600
2601        ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2602        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2603        return;
2604    }
2605
2606    if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
2607    {
2608        ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
2609        int targetStreamIndex = 0, i=0;
2610        int matchedFrameCnt = -1, processingReqIndex;
2611        struct camera2_shot_ext *shot_ext;
2612        struct camera2_shot_ext *shot_ext_capture;
2613        bool triggered = false;
2614        int afMode;
2615
2616        /* dqbuf from sensor */
2617        ALOGV("Sensor DQbuf start");
2618        index = cam_int_dqbuf(&(m_camera_info.sensor));
2619        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
2620
2621        m_recordOutput = shot_ext->shot.ctl.request.outputStreams[2];
2622        m_previewCbOutput = shot_ext->shot.ctl.request.outputStreams[3];
2623
2624        if (m_nightCaptureCnt != 0) {
2625            matchedFrameCnt = m_nightCaptureFrameCnt;
2626        } else if (m_flashCnt != 0) {
2627            matchedFrameCnt = m_flashFrameCount;
2628            ALOGD("Skip frame, request is fixed at %d", matchedFrameCnt);
2629        } else {
2630            matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
2631        }
2632
2633        if (matchedFrameCnt != -1) {
2634            frameTime = systemTime();
2635            m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
2636            m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt);
2637            if (m_afModeWaitingCnt != 0) {
2638                ALOGV("### Af Trigger pulled, waiting for mode change cnt(%d) ", m_afModeWaitingCnt);
2639                m_afModeWaitingCnt --;
2640                if (m_afModeWaitingCnt == 1) {
2641                    m_afModeWaitingCnt = 0;
2642                    OnAfTrigger(m_afPendingTriggerId);
2643                }
2644            }
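            /* Digital zoom: zoomRatio is the full sensor width over the requested
             * crop width; the zoom window keeps the preview aspect ratio and is
             * centered inside the aspect-matched crop from m_getRatioSize(). */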
2645            float zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2];
2646            float zoomLeft, zoomTop, zoomWidth, zoomHeight;
2647            int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0;
2648
2649            m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(),
2650                           m_streamThreads[0]->m_parameters.outputWidth, m_streamThreads[0]->m_parameters.outputHeight,
2651                           &crop_x, &crop_y,
2652                           &crop_w, &crop_h,
2653                           0);
2654
2655            if (m_streamThreads[0]->m_parameters.outputWidth >= m_streamThreads[0]->m_parameters.outputHeight) {
2656                zoomWidth =  m_camera2->getSensorW() / zoomRatio;
2657                zoomHeight = zoomWidth *
2658                        m_streamThreads[0]->m_parameters.outputHeight / m_streamThreads[0]->m_parameters.outputWidth;
2659            } else {
2660                zoomHeight = m_camera2->getSensorH() / zoomRatio;
2661                zoomWidth = zoomHeight *
2662                        m_streamThreads[0]->m_parameters.outputWidth / m_streamThreads[0]->m_parameters.outputHeight;
2663            }
2664            zoomLeft = (crop_w - zoomWidth) / 2;
2665            zoomTop = (crop_h - zoomHeight) / 2;
2666
2667            int32_t new_cropRegion[3] = { (int32_t)zoomLeft, (int32_t)zoomTop, (int32_t)zoomWidth };
2668
2669            shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0];
2670            shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1];
2671            shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2];
2672            if (m_IsAfModeUpdateRequired) {
2673                ALOGE("### AF Mode change(Mode %d) ", m_afMode);
2674                shot_ext->shot.ctl.aa.afMode = m_afMode;
2675                if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
2676                    ALOGE("### With automatic trigger for continuous modes");
2677                    m_afState = HAL_AFSTATE_STARTED;
2678                    shot_ext->shot.ctl.aa.afTrigger = 1;
2679                    triggered = true;
2680                }
2681                m_IsAfModeUpdateRequired = false;
2682                if (m_afMode2 != NO_CHANGE) {
2683                    enum aa_afmode tempAfMode = m_afMode2;
2684                    m_afMode2 = NO_CHANGE;
2685                    SetAfMode(tempAfMode);
2686                }
2687            }
2688            else {
2689                shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
2690            }
2691            if (m_IsAfTriggerRequired) {
2692                ALOGE("### AF Triggering with mode (%d)", m_afMode);
2693                // Flash triggering with AF
2694                if ((shot_ext->shot.ctl.aa.aeMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_afFlashEnableFlg == false)
2695                        && (m_cameraId == 0)) {
2696                    m_afFlashEnableFlg = true;
2697                    m_flashTimeOut = 3;
2698                    m_afFlashCnt = IS_FLASH_AF_ON;
2699                }
2700                if (m_afFlashEnableFlg) {
2701                    if (m_afFlashCnt == IS_FLASH_AF_ON_DONE) {
2702                        // Flash is enabled and start AF
2703                        if (m_afState == HAL_AFSTATE_SCANNING) {
2704                            ALOGE("(%s): restarting trigger ", __FUNCTION__);
2705                        } else {
2706                            if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
2707                                ALOGE("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
2708                            else
2709                                m_afState = HAL_AFSTATE_STARTED;
2710                        }
2711                        shot_ext->shot.ctl.aa.afTrigger = 1;
2712                        shot_ext->shot.ctl.aa.afMode = m_afMode;
2713                        m_IsAfTriggerRequired = false;
2714                    }
2715                } else {
2716                    if (m_afState == HAL_AFSTATE_SCANNING) {
2717                        ALOGE("(%s): restarting trigger ", __FUNCTION__);
2718                    } else {
2719                        if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
2720                            ALOGE("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
2721                        else
2722                            m_afState = HAL_AFSTATE_STARTED;
2723                    }
2724                    shot_ext->shot.ctl.aa.afTrigger = 1;
2725                    shot_ext->shot.ctl.aa.afMode = m_afMode;
2726                    m_IsAfTriggerRequired = false;
2727                }
2728            }
2729            else {
2730                shot_ext->shot.ctl.aa.afTrigger = 0;
2731            }
2732            if (m_wideAspect) {
2733                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
2734                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
2735                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2736            }
2737            else {
2738                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
2739            }
2740            if (m_wideAspect) {
2741//                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
2742                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
2743                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2744            }
2745            else {
2746//                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
2747            }
2748            if (triggered)
2749                shot_ext->shot.ctl.aa.afTrigger = 1;
2750
2751            // TODO : check collision with AFMode Update
2752            if (m_IsAfLockRequired) {
2753                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
2754                m_IsAfLockRequired = false;
2755            }
2756            ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
2757                index,
2758                shot_ext->shot.ctl.request.frameCount,
2759                shot_ext->request_scp,
2760                shot_ext->request_scc,
2761                shot_ext->dis_bypass, sizeof(camera2_shot));
2762            ALOGV("### m_nightCaptureCnt (%d)", m_nightCaptureCnt);
2763            if (0 == shot_ext->shot.ctl.aa.afRegions[0] && 0 == shot_ext->shot.ctl.aa.afRegions[1]
2764                && 0 == shot_ext->shot.ctl.aa.afRegions[2] && 0 == shot_ext->shot.ctl.aa.afRegions[3]) {
2765                ALOGV("(%s): AF region resetting", __FUNCTION__);
2766                lastAfRegion[0] = 0;
2767                lastAfRegion[1] = 0;
2768                lastAfRegion[2] = 0;
2769                lastAfRegion[3] = 0;
2770            }
2771            else {
2772                if (!(lastAfRegion[0] == shot_ext->shot.ctl.aa.afRegions[0] && lastAfRegion[1] == shot_ext->shot.ctl.aa.afRegions[1]
2773                        && lastAfRegion[2] == shot_ext->shot.ctl.aa.afRegions[2] && lastAfRegion[3] == shot_ext->shot.ctl.aa.afRegions[3])) {
2774                    ALOGV("(%s): AF region changed : triggering", __FUNCTION__);
2775                    shot_ext->shot.ctl.aa.afTrigger = 1;
2776                    shot_ext->shot.ctl.aa.afMode = m_afMode;
2777                    m_afState = HAL_AFSTATE_STARTED;
2778                    lastAfRegion[0] = shot_ext->shot.ctl.aa.afRegions[0];
2779                    lastAfRegion[1] = shot_ext->shot.ctl.aa.afRegions[1];
2780                    lastAfRegion[2] = shot_ext->shot.ctl.aa.afRegions[2];
2781                    lastAfRegion[3] = shot_ext->shot.ctl.aa.afRegions[3];
2782                }
2783            }
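            /* Night capture: a still capture requested in NIGHT scene mode switches
             * to NIGHT_CAPTURE for several frames (2-30 fps range) and only requests
             * the SCC output on the last frame of the countdown. */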
2784            if (m_nightCaptureCnt == 0) {
2785                if (shot_ext->shot.ctl.aa.captureIntent == ANDROID_CONTROL_INTENT_STILL_CAPTURE
2786                        && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
2787                    shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
2788                    shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
2789                    shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2790                    m_nightCaptureCnt = 4;
2791                    m_nightCaptureFrameCnt = matchedFrameCnt;
2792                    shot_ext->request_scc = 0;
2793                }
2794            }
2795            else if (m_nightCaptureCnt == 1) {
2796                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
2797                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
2798                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2799                m_nightCaptureCnt--;
2800                shot_ext->request_scc = 1;
2801            }
2802            else if (m_nightCaptureCnt == 2 || m_nightCaptureCnt == 3 || m_nightCaptureCnt == 4) {
2803                shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
2804                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
2805                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2806                m_nightCaptureCnt--;
2807                shot_ext->request_scc = 0;
2808            }
2809
2810            // Flash mode
2811            // Hold back request_scc (skip the SCC output) while flash is enabled so the flash sequence can run
2812            if ((shot_ext->shot.ctl.aa.aeMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_flashEnableFlg == false)
2813                    && (m_cameraId == 0)) {
2814                if (shot_ext->shot.ctl.aa.captureIntent == ANDROID_CONTROL_INTENT_STILL_CAPTURE) {
2815                    ALOGE("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__);
2816                    shot_ext->request_scc = 0;
2817                    m_flashFrameCount = matchedFrameCnt;
2818                    m_flashEnableFlg = true;
2819                    m_flashCaptured = false;
2820                    m_flashCnt = IS_FLASH_ON;
2821                    m_flashTimeOut = 3;
2822                }
2823            }
2824
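            /* flashListener()/flashSetter() appear to advance the flash state machine from ISP feedback and program the flash controls for this frame */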
2825            flashListener(shot_ext);
2826            flashSetter(shot_ext);
2827            ALOGV("(%s): queued  aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__,
2828            (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
2829            (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
2830            (int)(shot_ext->shot.ctl.aa.afTrigger));
2831            cam_int_qbuf(&(m_camera_info.isp), index);
2832
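            /* short fixed delay before dequeuing from the ISP; the 10 ms value looks like an empirical pacing choice */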
2833            usleep(10000);
2834
2835            ALOGV("### isp DQBUF start");
2836            index_isp = cam_int_dqbuf(&(m_camera_info.isp));
2837
2838
2839            shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
2840
2841            ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
2842                index_isp,
2843                shot_ext->shot.ctl.request.frameCount,
2844                shot_ext->request_scp,
2845                shot_ext->request_scc,
2846                shot_ext->dis_bypass, sizeof(camera2_shot));
2847            ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__,
2848                (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
2849                (int)(shot_ext->shot.dm.aa.awbMode),
2850                (int)(shot_ext->shot.dm.aa.afMode));
2851
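            /* wake the stream threads that have output for this frame: SCP drives the preview stream (0), SCC drives the capture stream (1) */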
2852            m_previewOutput = 0;
2853            if (shot_ext->request_scp) {
2854                m_previewOutput = 1;
2855                m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
2856            }
2857            if (shot_ext->request_scc) {
2858                 ALOGV("### m_nightCaptureCnt (%d) request_scc true", m_nightCaptureCnt);
2859                memcpy(&m_jpegMetadata, &shot_ext->shot, sizeof(struct camera2_shot));
2860                int shutterSpeed = (m_jpegMetadata.dm.sensor.exposureTime/1000);
2861
2862                if (shutterSpeed < 0) {
2863                    shutterSpeed = 100;
2864                }
2865                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
2866            }
2867
2868            ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
2869               shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
2870            if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
2871                ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
2872                m_scp_closed = true;
2873            }
2874            else
2875                m_scp_closed = false;
2876
2877            if (!shot_ext->fd_bypass) {
2878                /* scale face-detection rectangles from preview output coordinates back to sensor coordinates */
2879                for (int i=0; i < CAMERA2_MAX_FACES; i++) {
2880                    if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
2881                        shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW
2882                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][0])
2883                                                                                                / m_streamThreads[0].get()->m_parameters.outputWidth;
2884                    if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
2885                        shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH
2886                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][1])
2887                                                                                                / m_streamThreads[0].get()->m_parameters.outputHeight;
2888                    if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
2889                        shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW
2890                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][2])
2891                                                                                                / m_streamThreads[0].get()->m_parameters.outputWidth;
2892                    if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
2893                        shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH
2894                                                                                                * shot_ext->shot.dm.stats.faceRectangles[i][3])
2895                                                                                                / m_streamThreads[0].get()->m_parameters.outputHeight;
2896                }
2897            }
2898            if (m_nightCaptureCnt == 0 && m_flashCnt == 0) {
2899                m_requestManager->ApplyDynamicMetadata(shot_ext);
2900            }
2901            OnAfNotification(shot_ext->shot.dm.aa.afState);
2902        }
2903
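        /* register the sensor buffer with the request manager; a valid index may also carry an AF mode update to apply */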
2904        processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]), &afMode);
2905        if (processingReqIndex != -1)
2906            SetAfMode((enum aa_afmode)afMode);
2907
2908
2909        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
2910        if (m_scp_closing || m_scp_closed) {
2911            ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
2912            shot_ext->request_scc = 0;
2913            shot_ext->request_scp = 0;
2914            shot_ext->request_sensor = 0;
2915        }
2916
2917        cam_int_qbuf(&(m_camera_info.sensor), index);
2918        ALOGV("### Sensor QBUF done");
2919
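        /* if no request matched this frame, signal the sensor thread again so an internal ("bubble") frame keeps the pipeline fed */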
2920        if (!m_scp_closing
2921            && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){
2922            ALOGD("make bubble shot: matchedFrameCnt(%d) processingReqIndex(%d)",
2923                                    matchedFrameCnt, processingReqIndex);
2924            selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2925        }
2926    }
2927    return;
2928}
2929
2930void ExynosCameraHWInterface2::m_ispThreadInitialize(SignalDrivenThread * self)
2931{
2932    ALOGV("DEBUG(%s): ", __FUNCTION__ );
2933    /* will add */
2934    return;
2935}
2936
2937
2938void ExynosCameraHWInterface2::m_ispThreadFunc(SignalDrivenThread * self)
2939{
2940    ALOGV("DEBUG(%s): ", __FUNCTION__ );
2941    /* will add */
2942    return;
2943}
2944
2945void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self)
2946{
2947    uint32_t                currentSignal   = self->GetProcessingSignal();
2948    StreamThread *          selfThread      = ((StreamThread*)self);
2949    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
2950    record_parameters_t     *selfRecordParms =  &(selfThread->m_recordParameters);
2951    callback_parameters_t   *selfPreviewCbParms =  &(selfThread->m_previewCbParameters);
2952    node_info_t             *currentNode    = &(selfStreamParms->node);
2953
2954    buffer_handle_t * buf = NULL;
2955    status_t res;
2956    void *virtAddr[3];
2957    int i, j;
2958    int index;
2959    nsecs_t timestamp;
2960
2961    if (!(selfThread->m_isBufferInit))
2962    {
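        /* first pass: dequeue every service buffer once, look it up by its locked virtual address and record whether it sits with the driver or the HAL */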
2963        for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
2964            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
2965            if (res != NO_ERROR || buf == NULL) {
2966                ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
2967                return;
2968            }
2969            ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
2970               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2971
2972            if (m_grallocHal->lock(m_grallocHal, *buf,
2973                       selfStreamParms->usage,
2974                       0, 0, selfStreamParms->outputWidth, selfStreamParms->outputHeight, virtAddr) != 0) {
2975                ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2976                return;
2977            }
2978            ALOGV("DEBUG(%s): locked img buf plane0(%x) plane1(%x) plane2(%x)",
2979            __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
2980
2981            index = selfThread->findBufferIndex(virtAddr[0]);
2982            if (index == -1) {
2983                ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
2984            }
2985            else {
2986                ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
2987                    __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
2988                if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
2989                    selfStreamParms->svcBufStatus[index] = ON_DRIVER;
2990                else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
2991                    selfStreamParms->svcBufStatus[index] = ON_HAL;
2992                else {
2993                    ALOGV("DBG(%s): buffer status abnormal (%d) "
2994                        , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
2995                }
2996                selfStreamParms->numSvcBufsInHal++;
2997                if (*buf != selfStreamParms->svcBufHandle[index])
2998                    ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
2999                else
3000                    ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
3001            }
3002            selfStreamParms->svcBufIndex = 0;
3003        }
3004        selfThread->m_isBufferInit = true;
3005    }
3006
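    /* lazily claim the recording stream's service buffers on first use, matching each dequeued handle against the svcBufHandle table */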
3007    if (m_recordingEnabled && m_needsRecordBufferInit) {
3008        ALOGV("DEBUG(%s): Recording Buffer Initialization numsvcbuf(%d)",
3009            __FUNCTION__, selfRecordParms->numSvcBuffers);
3010        int checkingIndex = 0;
3011        bool found = false;
3012        for ( i=0 ; i < selfRecordParms->numSvcBuffers; i++) {
3013            res = selfRecordParms->streamOps->dequeue_buffer(selfRecordParms->streamOps, &buf);
3014            if (res != NO_ERROR || buf == NULL) {
3015                ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3016                return;
3017            }
3018            selfRecordParms->numSvcBufsInHal++;
3019            ALOGV("DEBUG(%s): [record] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3020               selfRecordParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3021
3022            if (m_grallocHal->lock(m_grallocHal, *buf,
3023                   selfRecordParms->usage, 0, 0,
3024                   selfRecordParms->outputWidth, selfRecordParms->outputHeight, virtAddr) != 0) {
3025                ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
3026            }
3027            else {
3028                  ALOGV("DEBUG(%s): [record] locked img buf plane0(%x) plane1(%x) plane2(%x)",
3029                    __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
3030            }
3031            found = false;
3032            for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
3033                if (selfRecordParms->svcBufHandle[checkingIndex] == *buf ) {
3034                    found = true;
3035                    break;
3036                }
3037            }
3038            ALOGV("DEBUG(%s): [record] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
3039            if (!found) break;
3040
3041            index = checkingIndex;
3042
3043            if (index == -1) {
3044                ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
3045            }
3046            else {
3047                ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3048                    __FUNCTION__, index, selfRecordParms->svcBufStatus[index]);
3049                if (selfRecordParms->svcBufStatus[index]== ON_SERVICE)
3050                    selfRecordParms->svcBufStatus[index] = ON_HAL;
3051                else {
3052                    ALOGV("DBG(%s): buffer status abnormal (%d) "
3053                        , __FUNCTION__, selfRecordParms->svcBufStatus[index]);
3054                }
3055                if (*buf != selfRecordParms->svcBufHandle[index])
3056                    ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
3057                else
3058                    ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
3059            }
3060            selfRecordParms->svcBufIndex = 0;
3061        }
3062        m_needsRecordBufferInit = false;
3063    }
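        /* same lazy setup for the preview-callback stream, plus an intermediate ION buffer (m_previewCbBuf) used as the color-conversion destination */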
3064        if (m_previewCbEnabled && m_needsPreviewCbBufferInit) {
3065            ALOGV("DEBUG(%s): previewCb Buffer Initialization numsvcbuf(%d)",
3066                __FUNCTION__, selfPreviewCbParms->numSvcBuffers);
3067            int checkingIndex = 0;
3068            bool found = false;
3069
3070            m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(selfPreviewCbParms->internalFormat), selfPreviewCbParms->outputWidth,
3071                selfPreviewCbParms->outputHeight, &m_previewCbBuf);
3072            ALOGV("(%s): PreviewCb tempbuf size : %d %d %d", __FUNCTION__, m_previewCbBuf.size.extS[0],
3073                m_previewCbBuf.size.extS[1], m_previewCbBuf.size.extS[2]);
3074
3075            if (allocCameraMemory(selfStreamParms->ionClient, &m_previewCbBuf, selfPreviewCbParms->internalPlanes) == -1) {
3076                ALOGE("ERR(%s): Failed to allocate previewcb buf", __FUNCTION__);
3077            }
3078
3079            for ( i=0 ; i < selfPreviewCbParms->numSvcBuffers; i++) {
3080                res = selfPreviewCbParms->streamOps->dequeue_buffer(selfPreviewCbParms->streamOps, &buf);
3081                if (res != NO_ERROR || buf == NULL) {
3082                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3083                    return;
3084                }
3085                selfPreviewCbParms->numSvcBufsInHal++;
3086                ALOGV("DEBUG(%s): [previewCb] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3087                   selfPreviewCbParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3088
3089                if (m_grallocHal->lock(m_grallocHal, *buf,
3090                       selfPreviewCbParms->usage, 0, 0,
3091                       selfPreviewCbParms->outputWidth, selfPreviewCbParms->outputHeight, virtAddr) != 0) {
3092                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
3093                }
3094                else {
3095                      ALOGV("DEBUG(%s): [previewCb] locked img buf plane0(%x) plane1(%x) plane2(%x)",
3096                        __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
3097                }
3098                found = false;
3099                for (checkingIndex = 0; checkingIndex < selfPreviewCbParms->numSvcBuffers ; checkingIndex++) {
3100                    if (selfPreviewCbParms->svcBufHandle[checkingIndex] == *buf ) {
3101                        found = true;
3102                        break;
3103                    }
3104                }
3105                ALOGV("DEBUG(%s): [previewCb] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
3106                if (!found) break;
3107
3108                index = checkingIndex;
3109
3110                if (index == -1) {
3111                    ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
3112                }
3113                else {
3114                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3115                        __FUNCTION__, index, selfPreviewCbParms->svcBufStatus[index]);
3116                    if (selfPreviewCbParms->svcBufStatus[index]== ON_SERVICE)
3117                        selfPreviewCbParms->svcBufStatus[index] = ON_HAL;
3118                    else {
3119                        ALOGV("DBG(%s): buffer status abnormal (%d) "
3120                            , __FUNCTION__, selfPreviewCbParms->svcBufStatus[index]);
3121                    }
3122                    if (*buf != selfPreviewCbParms->svcBufHandle[index])
3123                        ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
3124                    else
3125                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
3126                }
3127                selfPreviewCbParms->svcBufIndex = 0;
3128            }
3129            m_needsPreviewCbBufferInit = false;
3130        }
3131
3132}
3133
3134void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
3135{
3136    StreamThread *          selfThread      = ((StreamThread*)self);
3137    ALOGV("DEBUG(%s): ", __FUNCTION__ );
3138    memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
3139    selfThread->m_isBufferInit = false;
3140
3141    return;
3142}
3143
3144void ExynosCameraHWInterface2::m_streamFunc0(SignalDrivenThread *self)
3145{
3146    uint32_t                currentSignal   = self->GetProcessingSignal();
3147    StreamThread *          selfThread      = ((StreamThread*)self);
3148    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3149    record_parameters_t     *selfRecordParms =  &(selfThread->m_recordParameters);
3150    callback_parameters_t   *selfPreviewCbParms =  &(selfThread->m_previewCbParameters);
3151    node_info_t             *currentNode    = &(selfStreamParms->node);
3152
3153    if (currentSignal & SIGNAL_STREAM_CHANGE_PARAMETER) {
3154        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER", __FUNCTION__);
3155
3156        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER DONE", __FUNCTION__);
3157    }
3158
3159    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3160        buffer_handle_t * buf = NULL;
3161        status_t res;
3162        void *virtAddr[3];
3163        int i, j;
3164        int index;
3165        nsecs_t timestamp;
3166        camera2_stream *frame;
3167
3168        ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING",
3169            __FUNCTION__,selfThread->m_index);
3170
3171        m_streamBufferInit(self);
3172
3173        do {
3174            ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
3175                selfThread->m_index, selfStreamParms->streamType);
3176
3177#ifdef ENABLE_FRAME_SYNC
3178            index = cam_int_dqbuf(&(selfStreamParms->node), selfStreamParms->nodePlanes + selfStreamParms->metaPlanes);
3179            frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[index].virt.extP[0]);
3180            ALOGD("frame count(SCP) : %d", frame->fcount);
3181#else
3182            index = cam_int_dqbuf(&(selfStreamParms->node));
3183#endif
3184            ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
3185                selfThread->m_index, selfStreamParms->streamType, index);
3186
3187            if (selfStreamParms->svcBufStatus[index] !=  ON_DRIVER)
3188                ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
3189                       __FUNCTION__, selfStreamParms->svcBufStatus[index]);
3190            selfStreamParms->svcBufStatus[index] = ON_HAL;
3191
3192            if (m_recordOutput && m_recordingEnabled) {
3193                ALOGV("DEBUG(%s): Entering record frame creator, index(%d)",__FUNCTION__, selfRecordParms->svcBufIndex);
3194                bool found = false;
3195                for (int i = 0 ; i < selfRecordParms->numSvcBuffers ; i++) {
3196                    if (selfRecordParms->svcBufStatus[selfRecordParms->svcBufIndex] == ON_HAL) {
3197                        found = true;
3198                        break;
3199                    }
3200                    selfRecordParms->svcBufIndex++;
3201                    if (selfRecordParms->svcBufIndex >= selfRecordParms->numSvcBuffers)
3202                        selfRecordParms->svcBufIndex = 0;
3203                }
3204                if (!found) {
3205                    ALOGE("(%s): cannot find free recording buffer", __FUNCTION__);
3206                    selfRecordParms->svcBufIndex++;
3207                    break;
3208                }
3209
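                /* color-convert / scale the dequeued preview frame into the chosen recording buffer through the CSC engine */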
3210                if (m_exynosVideoCSC) {
3211                    int videoW = selfRecordParms->outputWidth, videoH = selfRecordParms->outputHeight;
3212                    int cropX, cropY, cropW, cropH = 0;
3213                    int previewW = selfStreamParms->outputWidth, previewH = selfStreamParms->outputHeight;
3214                    m_getRatioSize(previewW, previewH,
3215                                   videoW, videoH,
3216                                   &cropX, &cropY,
3217                                   &cropW, &cropH,
3218                                   0);
3219
3220                    ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
3221                             __FUNCTION__, cropX, cropY, cropW, cropH);
3222
3223                    csc_set_src_format(m_exynosVideoCSC,
3224                                       previewW, previewH,
3225                                       cropX, cropY, cropW, cropH,
3226                                       selfStreamParms->outputFormat,
3227                                       0);
3228
3229                    csc_set_dst_format(m_exynosVideoCSC,
3230                                       videoW, videoH,
3231                                       0, 0, videoW, videoH,
3232                                       selfRecordParms->outputFormat,
3233                                       1);
3234
3235                    csc_set_src_buffer(m_exynosVideoCSC,
3236                                   (void **)(&(selfStreamParms->svcBuffers[index].fd.fd)));
3237
3238                    csc_set_dst_buffer(m_exynosVideoCSC,
3239                        (void **)(&(selfRecordParms->svcBuffers[selfRecordParms->svcBufIndex].fd.fd)));
3240
3241                    if (csc_convert(m_exynosVideoCSC) != 0) {
3242                        ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
3243                    }
3244                    else {
3245                        ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
3246                    }
3247                }
3248                else {
3249                    ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
3250                }
3251
3252                res = selfRecordParms->streamOps->enqueue_buffer(selfRecordParms->streamOps,
3253                        systemTime(),
3254                        &(selfRecordParms->svcBufHandle[selfRecordParms->svcBufIndex]));
3255                ALOGV("DEBUG(%s): stream(%d) record enqueue_buffer to svc done res(%d)", __FUNCTION__,
3256                    selfThread->m_index, res);
3257                if (res == 0) {
3258                    selfRecordParms->svcBufStatus[selfRecordParms->svcBufIndex] = ON_SERVICE;
3259                    selfRecordParms->numSvcBufsInHal--;
3260                }
3261            }
3262            if (m_previewCbOutput && m_previewCbEnabled) {
3263                ALOGV("DEBUG(%s): Entering previewcb creator, index(%d)",__FUNCTION__, selfPreviewCbParms->svcBufIndex);
3264
3265                bool found = false;
3266                for (int i = 0 ; i < selfPreviewCbParms->numSvcBuffers ; i++) {
3267                    if (selfPreviewCbParms->svcBufStatus[selfPreviewCbParms->svcBufIndex] == ON_HAL) {
3268                        found = true;
3269                        break;
3270                    }
3271                    selfPreviewCbParms->svcBufIndex++;
3272                    if (selfPreviewCbParms->svcBufIndex >= selfPreviewCbParms->numSvcBuffers)
3273                        selfPreviewCbParms->svcBufIndex = 0;
3274                }
3275                if (!found) {
3276                    ALOGE("(%s): cannot find free previewcb buffer", __FUNCTION__);
3277                    selfPreviewCbParms->svcBufIndex++;
3278                    break;
3279                }
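                /* NV21 callbacks are converted through the CSC engine into m_previewCbBuf and then copied out plane by plane;
                   YV12 callbacks are copied straight from the preview buffer */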
3280                if (selfPreviewCbParms->outputFormat == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
3281                    if (m_exynosVideoCSC) {
3282                        int previewCbW = selfPreviewCbParms->outputWidth, previewCbH = selfPreviewCbParms->outputHeight;
3283                        int cropX, cropY, cropW, cropH = 0;
3284                        int previewW = selfStreamParms->outputWidth, previewH = selfStreamParms->outputHeight;
3285                        m_getRatioSize(previewW, previewH,
3286                                       previewCbW, previewCbH,
3287                                       &cropX, &cropY,
3288                                       &cropW, &cropH,
3289                                       0);
3290
3291                        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
3292                                 __FUNCTION__, cropX, cropY, cropW, cropH);
3293                        csc_set_src_format(m_exynosVideoCSC,
3294                                           previewW, previewH,
3295                                           cropX, cropY, cropW, cropH,
3296                                           selfStreamParms->outputFormat,
3297                                           0);
3298
3299                        csc_set_dst_format(m_exynosVideoCSC,
3300                                           previewCbW, previewCbH,
3301                                           0, 0, previewCbW, previewCbH,
3302                                           selfPreviewCbParms->internalFormat,
3303                                           1);
3304
3305                        csc_set_src_buffer(m_exynosVideoCSC,
3306                                       (void **)(&(selfStreamParms->svcBuffers[index].fd.fd)));
3307
3308                        csc_set_dst_buffer(m_exynosVideoCSC,
3309                            (void **)(&(m_previewCbBuf.fd.fd)));
3310
3311                        if (csc_convert(m_exynosVideoCSC) != 0) {
3312                            ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__);
3313                        }
3314                        else {
3315                            ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__);
3316                        }
3317                        if (previewCbW == ALIGN(previewCbW, 16)) {
3318                            ALOGV("(%s):previewcb %d = %d", __FUNCTION__, previewCbW, ALIGN(previewCbW, 16));
3319                            memcpy(selfPreviewCbParms->svcBuffers[selfPreviewCbParms->svcBufIndex].virt.extP[0],
3320                                m_previewCbBuf.virt.extP[0], previewCbW * previewCbH);
3321                            memcpy(selfPreviewCbParms->svcBuffers[selfPreviewCbParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH,
3322                                m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 );
3323                        }
3324                        else {
3325                            // TODO : copy line by line ?
3326                        }
3327                    }
3328                    else {
3329                        ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
3330                    }
3331                }
3332                else if (selfPreviewCbParms->outputFormat == HAL_PIXEL_FORMAT_YV12) {
3333                    int previewCbW = selfPreviewCbParms->outputWidth, previewCbH = selfPreviewCbParms->outputHeight;
3334                    int stride = ALIGN(previewCbW, 16);
3335                    int c_stride = ALIGN(stride, 16);
3336                    memcpy(selfPreviewCbParms->svcBuffers[selfPreviewCbParms->svcBufIndex].virt.extP[0],
3337                        selfStreamParms->svcBuffers[index].virt.extP[0], stride * previewCbH);
3338                    memcpy(selfPreviewCbParms->svcBuffers[selfPreviewCbParms->svcBufIndex].virt.extP[0] + stride * previewCbH,
3339                        selfStreamParms->svcBuffers[index].virt.extP[1], c_stride * previewCbH / 2 );
3340                    memcpy(selfPreviewCbParms->svcBuffers[selfPreviewCbParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2),
3341                        selfStreamParms->svcBuffers[index].virt.extP[2], c_stride * previewCbH / 2 );
3342
3343                }
3344                res = selfPreviewCbParms->streamOps->enqueue_buffer(selfPreviewCbParms->streamOps,
3345                        systemTime(),
3346                        &(selfPreviewCbParms->svcBufHandle[selfPreviewCbParms->svcBufIndex]));
3347                ALOGV("DEBUG(%s): stream(%d) previewcb enqueue_buffer[%d] to svc done res(%d)", __FUNCTION__,
3348                    selfThread->m_index, selfPreviewCbParms->svcBufIndex, res);
3349                if (res == 0) {
3350                    selfPreviewCbParms->svcBufStatus[selfPreviewCbParms->svcBufIndex] = ON_SERVICE;
3351                    selfPreviewCbParms->numSvcBufsInHal--;
3352                }
3353            }
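            /* hand the preview frame to the service, or cancel it when preview output is not requested or frames are still being skipped */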
3354            if (m_previewOutput && m_requestManager->GetSkipCnt() <= 0) {
3355
3356                ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
3357                res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3358                        m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()),
3359                        &(selfStreamParms->svcBufHandle[index]));
3360
3361                ALOGV("DEBUG(%s): stream(%d) enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3362            }
3363            else {
3364                res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3365                        &(selfStreamParms->svcBufHandle[index]));
3366                ALOGV("DEBUG(%s): stream(%d) cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3367            }
3368            if (res == 0) {
3369                selfStreamParms->svcBufStatus[index] = ON_SERVICE;
3370                selfStreamParms->numSvcBufsInHal--;
3371            }
3372            else {
3373                selfStreamParms->svcBufStatus[index] = ON_HAL;
3374            }
3375
3376        }
3377        while (0);
3378
3379        if (m_recordOutput && m_recordingEnabled) {
3380            do {
3381                ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , selfRecordParms->numSvcBufsInHal);
3382                if (selfRecordParms->numSvcBufsInHal >= 1)
3383                {
3384                    ALOGV("DEBUG(%s): breaking", __FUNCTION__);
3385                    break;
3386                }
3387                res = selfRecordParms->streamOps->dequeue_buffer(selfRecordParms->streamOps, &buf);
3388                if (res != NO_ERROR || buf == NULL) {
3389                    ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
3390                    break;
3391                }
3392                selfRecordParms->numSvcBufsInHal ++;
3393                ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3394                   selfRecordParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3395
3396                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3397                bool found = false;
3398                int checkingIndex = 0;
3399                for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
3400                    if (priv_handle->fd == selfRecordParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3401                        found = true;
3402                        break;
3403                    }
3404                }
3405                ALOGV("DEBUG(%s): recording dequeued buffer found(%d) index(%d)", __FUNCTION__, found, checkingIndex);
3406
3407                if (!found) {
3408                     break;
3409                }
3410
3411                index = checkingIndex;
3412                if (selfRecordParms->svcBufStatus[index] == ON_SERVICE) {
3413                    selfRecordParms->svcBufStatus[index] = ON_HAL;
3414                }
3415                else {
3416                    ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
3417                        index,  selfRecordParms->svcBufStatus[index]);
3418                }
3419            } while (0);
3420        }
3421        if (m_previewCbOutput && m_previewCbEnabled) {
3422            do {
3423                ALOGV("DEBUG(%s): previewCb currentBuf#(%d)", __FUNCTION__ , selfPreviewCbParms->numSvcBufsInHal);
3424                if (selfPreviewCbParms->numSvcBufsInHal >= 1)
3425                {
3426                    ALOGV("DEBUG(%s): breaking", __FUNCTION__);
3427                    break;
3428                }
3429                res = selfPreviewCbParms->streamOps->dequeue_buffer(selfPreviewCbParms->streamOps, &buf);
3430                if (res != NO_ERROR || buf == NULL) {
3431                    ALOGV("DEBUG(%s): previewcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
3432                    break;
3433                }
3434                selfPreviewCbParms->numSvcBufsInHal ++;
3435                ALOGV("DEBUG(%s): previewcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3436                   selfPreviewCbParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3437
3438                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3439                bool found = false;
3440                int checkingIndex = 0;
3441                for (checkingIndex = 0; checkingIndex < selfPreviewCbParms->numSvcBuffers ; checkingIndex++) {
3442                    if (priv_handle->fd == selfPreviewCbParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3443                        found = true;
3444                        break;
3445                    }
3446                }
3447                ALOGV("DEBUG(%s): previewcb dequeued buffer found(%d) index(%d)", __FUNCTION__, found, checkingIndex);
3448
3449                if (!found) {
3450                     break;
3451                }
3452
3453                index = checkingIndex;
3454                if (selfPreviewCbParms->svcBufStatus[index] == ON_SERVICE) {
3455                    selfPreviewCbParms->svcBufStatus[index] = ON_HAL;
3456                }
3457                else {
3458                    ALOGV("DEBUG(%s): previewcb bufstatus abnormal [%d]  status = %d", __FUNCTION__,
3459                        index,  selfPreviewCbParms->svcBufStatus[index]);
3460                }
3461            } while (0);
3462        }
3463
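        /* replenish the preview pipeline: dequeue buffers the service has returned and queue them back to the driver node */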
3464        while (selfStreamParms->numSvcBufsInHal < selfStreamParms->numOwnSvcBuffers) {
3465            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3466            if (res != NO_ERROR || buf == NULL) {
3467                ALOGV("DEBUG(%s): stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
3468                break;
3469            }
3470            selfStreamParms->numSvcBufsInHal++;
3471            ALOGV("DEBUG(%s): stream(%d) got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3472                selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3473               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3474            const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3475
3476            bool found = false;
3477            int checkingIndex = 0;
3478            for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3479                if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3480                    found = true;
3481                    break;
3482                }
3483            }
3484            ALOGV("DEBUG(%s): post_dequeue_buffer found(%d)", __FUNCTION__, found);
3485            if (!found) break;
3486            ALOGV("DEBUG(%s): preparing to qbuf [%d]", __FUNCTION__, checkingIndex);
3487            index = checkingIndex;
3488            if (index < selfStreamParms->numHwBuffers) {
3489                uint32_t    plane_index = 0;
3490                ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[index]);
3491                struct v4l2_buffer v4l2_buf;
3492                struct v4l2_plane  planes[VIDEO_MAX_PLANES];
3493
3494                v4l2_buf.m.planes   = planes;
3495                v4l2_buf.type       = currentNode->type;
3496                v4l2_buf.memory     = currentNode->memory;
3497                v4l2_buf.index      = index;
3498                v4l2_buf.length     = currentNode->planes;
3499
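                /* note: fd1/fd2 are mapped to planes 2/1, presumably to match the driver's expected chroma plane order */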
3500                v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
3501                v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
3502                v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
3503                for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
3504                    v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
3505                    ALOGV("DEBUG(%s): plane(%d): fd(%d)  length(%d)",
3506                         __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
3507                         v4l2_buf.m.planes[plane_index].length);
3508                }
3509#ifdef ENABLE_FRAME_SYNC
3510                /* add plane for metadata*/
3511                v4l2_buf.length += selfStreamParms->metaPlanes;
3512                v4l2_buf.m.planes[3].m.fd = selfStreamParms->metaBuffers[index].fd.extFd[0];
3513                v4l2_buf.m.planes[3].length = selfStreamParms->metaBuffers[index].size.extS[0];
3514#endif
3515                if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
3516                    ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail",
3517                        __FUNCTION__, selfThread->m_index);
3518                    return;
3519                }
3520                selfStreamParms->svcBufStatus[index] = ON_DRIVER;
3521                ALOGV("DEBUG(%s): stream id(%d) type0 QBUF done index(%d)",
3522                    __FUNCTION__, selfThread->m_index, index);
3523            }
3524        }
3525
3526        ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING DONE",
3527            __FUNCTION__,selfThread->m_index);
3528    }
3529
3530
3531    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3532        int i, index = -1, cnt_to_dq = 0;
3533        status_t res;
3534        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3535        ALOGD("(%s):(%d) SIGNAL_THREAD_RELEASE", __FUNCTION__, selfStreamParms->streamType);
3536
3537        if (selfThread->m_isBufferInit) {
3538            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
3539                ALOGV("DEBUG(%s): checking buffer index[%d] - status(%d)",
3540                    __FUNCTION__, i, selfStreamParms->svcBufStatus[i]);
3541                if (selfStreamParms->svcBufStatus[i] ==ON_DRIVER) cnt_to_dq++;
3542            }
3543
3544            ALOGV("DEBUG(%s): calling stream(%d) streamoff (fd:%d)", __FUNCTION__,
3545            selfThread->m_index, selfStreamParms->fd);
3546            if (cam_int_streamoff(&(selfStreamParms->node)) < 0 ){
3547                ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3548            } else {
3549                    m_scp_closing = true;
3550            }
3551            ALOGV("DEBUG(%s): calling stream(%d) streamoff done", __FUNCTION__, selfThread->m_index);
3552            ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 (fd:%d)", __FUNCTION__,
3553                    selfThread->m_index, selfStreamParms->fd);
3554            currentNode->buffers = 0;
3555            cam_int_reqbufs(currentNode);
3556            ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 DONE(fd:%d)", __FUNCTION__,
3557                    selfThread->m_index, selfStreamParms->fd);
3558        }
3559#ifdef ENABLE_FRAME_SYNC
3560        // free metabuffers
3561        for(i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++)
3562            if(selfStreamParms->metaBuffers[i].fd.extFd[0] != 0){
3563                freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1);
3564                selfStreamParms->metaBuffers[i].fd.extFd[0] = 0;
3565                selfStreamParms->metaBuffers[i].size.extS[0] = 0;
3566            }
3567#endif
3568        selfThread->m_isBufferInit = false;
3569        selfThread->m_index = 255;
3570
3571        selfThread->m_releasing = false;
3572
3573        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
3574
3575        return;
3576    }
3577
3578    return;
3579}
3580
3581void ExynosCameraHWInterface2::m_streamFunc1(SignalDrivenThread *self)
3582{
3583    uint32_t                currentSignal   = self->GetProcessingSignal();
3584    StreamThread *          selfThread      = ((StreamThread*)self);
3585    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3586    record_parameters_t     *selfRecordParms =  &(selfThread->m_recordParameters);
3587    node_info_t             *currentNode    = &(selfStreamParms->node);
3588
3589    if (currentSignal & SIGNAL_STREAM_CHANGE_PARAMETER) {
3590        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER", __FUNCTION__);
3591
3592        m_resizeBuf.size.extS[0] = ALIGN(selfStreamParms->outputWidth, 16) * ALIGN(selfStreamParms->outputHeight, 16) * 2;
3593        m_resizeBuf.size.extS[1] = 0;
3594        m_resizeBuf.size.extS[2] = 0;
3595
3596        if (allocCameraMemory(selfStreamParms->ionClient, &m_resizeBuf, 1) == -1) {
3597            ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
3598        }
3599
3600        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER DONE", __FUNCTION__);
3601    }
3602
3603    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3604        buffer_handle_t * buf = NULL;
3605        status_t res;
3606        void *virtAddr[3];
3607        int i, j;
3608        int index;
3609        nsecs_t timestamp;
3610
3611        ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING",
3612            __FUNCTION__,selfThread->m_index);
3613
3614        m_streamBufferInit(self);
3615
3616        do {
3617            ExynosRect jpegRect;
3618            bool found = false;
3619            bool ret = false;
3620            int pictureW, pictureH, pictureFramesize = 0;
3621            int pictureFormat;
3622            int cropX, cropY, cropW, cropH = 0;
3623            ExynosBuffer resizeBufInfo;
3624            ExynosRect   m_orgPictureRect;
3625            camera2_stream *frame;
3626
3627            ALOGD("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
3628                selfThread->m_index, selfStreamParms->streamType);
3629            index = cam_int_dqbuf(&(selfStreamParms->node));
3630            ALOGD("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
3631                selfThread->m_index, selfStreamParms->streamType, index);
3632
3633#ifdef ENABLE_FRAME_SYNC
3634            frame = (struct camera2_stream *)(selfStreamParms->svcBuffers[index].virt.extP[selfStreamParms->nodePlanes -1]);
3635            ALOGD("frame count(SCC) : %d", frame->fcount);
3636#endif
3637            for (int i = 0; i < selfStreamParms->numSvcBuffers ; i++) {
3638                if (selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] == ON_HAL) {
3639                    found = true;
3640                    break;
3641                }
3642                selfStreamParms->svcBufIndex++;
3643                if (selfStreamParms->svcBufIndex >= selfStreamParms->numSvcBuffers)
3644                    selfStreamParms->svcBufIndex = 0;
3645            }
3646            if (!found) {
3647                ALOGE("ERR(%s): NO free SVC buffer for JPEG", __FUNCTION__);
3648                break;
3649            }
3650
3651            m_orgPictureRect.w = selfStreamParms->outputWidth;
3652            m_orgPictureRect.h = selfStreamParms->outputHeight;
3653
3654            ExynosBuffer* m_pictureBuf = &(m_camera_info.capture.buffer[index]);
3655
3656            m_getRatioSize(selfStreamParms->nodeWidth, selfStreamParms->nodeHeight,
3657                           m_orgPictureRect.w, m_orgPictureRect.h,
3658                           &cropX, &cropY,
3659                           &pictureW, &pictureH,
3660                           0);
3661            pictureFormat = V4L2_PIX_FMT_YUYV;
3662            pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
3663
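            /* scale / convert the raw SCC capture (YUYV) into an NV16 image at the requested picture size before JPEG encoding */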
3664            if (m_exynosPictureCSC) {
3665                m_getRatioSize(pictureW, pictureH,
3666                               m_orgPictureRect.w, m_orgPictureRect.h,
3667                               &cropX, &cropY,
3668                               &cropW, &cropH,
3669                               0);
3670
3671                ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
3672                      __FUNCTION__, cropX, cropY, cropW, cropH);
3673
3674                csc_set_src_format(m_exynosPictureCSC,
3675                                   ALIGN(pictureW, 16), ALIGN(pictureH, 16),
3676                                   cropX, cropY, cropW, cropH,
3677                                   V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
3678                                   0);
3679
3680                csc_set_dst_format(m_exynosPictureCSC,
3681                                   m_orgPictureRect.w, m_orgPictureRect.h,
3682                                   0, 0, m_orgPictureRect.w, m_orgPictureRect.h,
3683                                   V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
3684                                   0);
3685                csc_set_src_buffer(m_exynosPictureCSC,
3686                                   (void **)&m_pictureBuf->fd.fd);
3687
3688                csc_set_dst_buffer(m_exynosPictureCSC,
3689                                   (void **)&m_resizeBuf.fd.fd);
3690                for (int i = 0 ; i < 3 ; i++)
3691                    ALOGV("DEBUG(%s): m_resizeBuf.fd.extFd[%d]=%d m_resizeBuf.size.extS[%d]=%d",
3692                        __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
3693
3694                if (csc_convert(m_exynosPictureCSC) != 0)
3695                    ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
3696
3697
3698            }
3699            else {
3700                ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
3701            }
3702
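            /* m_resizeBuf is one contiguous allocation: save its descriptor, re-describe it as an NV16 image of the final
               picture size (deriving the chroma plane position from the luma plane size), and restore it after encoding */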
3703            resizeBufInfo = m_resizeBuf;
3704
3705            m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_orgPictureRect.w, m_orgPictureRect.h, &m_resizeBuf);
3706
3707            for (int i = 1; i < 3; i++) {
3708                if (m_resizeBuf.size.extS[i] != 0)
3709                    m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
3710
3711                ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
3712            }
3713
3714            jpegRect.w = m_orgPictureRect.w;
3715            jpegRect.h = m_orgPictureRect.h;
3716            jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
3717
3718            if (yuv2Jpeg(&m_resizeBuf, &selfStreamParms->svcBuffers[selfStreamParms->svcBufIndex], &jpegRect) == false)
3719                ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
3720            cam_int_qbuf(&(selfStreamParms->node), index);
3721            ALOGV("DEBUG(%s): stream(%d) type(%d) QBUF DONE ",__FUNCTION__,
3722                selfThread->m_index, selfStreamParms->streamType);
3723
3724            m_resizeBuf = resizeBufInfo;
3725
3726            res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, systemTime(), &(selfStreamParms->svcBufHandle[selfStreamParms->svcBufIndex]));
3727
3728            ALOGV("DEBUG(%s): stream(%d) enqueue_buffer index(%d) to svc done res(%d)",
3729                    __FUNCTION__, selfThread->m_index, selfStreamParms->svcBufIndex, res);
3730            if (res == 0) {
3731                selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] = ON_SERVICE;
3732                selfStreamParms->numSvcBufsInHal--;
3733            }
3734            else {
3735                selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] = ON_HAL;
3736            }
3737        }
3738        while (0);
3739
3740        while (selfStreamParms->numSvcBufsInHal < selfStreamParms->numOwnSvcBuffers) {
3741            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3742            if (res != NO_ERROR || buf == NULL) {
3743                ALOGV("DEBUG(%s): stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
3744                break;
3745            }
3746
3747            ALOGV("DEBUG(%s): stream(%d) got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3748                selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3749                ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3750
3751            const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3752
3753            bool found = false;
3754            int checkingIndex = 0;
3755            for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3756                if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3757                    found = true;
3758                    break;
3759                }
3760            }
3761            if (!found) break;
3762            selfStreamParms->svcBufStatus[checkingIndex] = ON_HAL;
3763            selfStreamParms->numSvcBufsInHal++;
3764        }
3765
3766        ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING DONE",
3767            __FUNCTION__,selfThread->m_index);
3768    }
3769
3770    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3771        int i, index = -1, cnt_to_dq = 0;
3772        status_t res;
3773        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3774        ALOGD("(%s):(%d) SIGNAL_THREAD_RELEASE", __FUNCTION__, selfStreamParms->streamType);
3775
3776        if (selfThread->m_isBufferInit) {
3777            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
3778                ALOGV("DEBUG(%s): checking buffer index[%d] - status(%d)",
3779                    __FUNCTION__, i, selfStreamParms->svcBufStatus[i]);
3780                if (selfStreamParms->svcBufStatus[i] ==ON_DRIVER) cnt_to_dq++;
3781            }
3782
3783            ALOGV("DEBUG(%s): calling stream(%d) streamoff (fd:%d)", __FUNCTION__,
3784            selfThread->m_index, selfStreamParms->fd);
3785            if (cam_int_streamoff(&(selfStreamParms->node)) < 0 ){
3786                ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3787            } else {
3788                    m_camera_info.capture.status = false;
3789            }
3790            ALOGV("DEBUG(%s): calling stream(%d) streamoff done", __FUNCTION__, selfThread->m_index);
3791            ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 (fd:%d)", __FUNCTION__,
3792                    selfThread->m_index, selfStreamParms->fd);
3793            currentNode->buffers = 0;
3794            cam_int_reqbufs(currentNode);
3795            ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 DONE(fd:%d)", __FUNCTION__,
3796                    selfThread->m_index, selfStreamParms->fd);
3797        }
3798        if (selfThread->m_index == 1 && m_resizeBuf.size.s != 0) {
3799            freeCameraMemory(&m_resizeBuf, 1);
3800        }
3801        selfThread->m_isBufferInit = false;
3802        selfThread->m_index = 255;
3803
3804        selfThread->m_releasing = false;
3805
3806        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
3807
3808        return;
3809    }
3810
3811    return;
3812}
3813
3814
3815void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
3816{
3817    uint32_t                currentSignal   = self->GetProcessingSignal();
3818    StreamThread *          selfThread      = ((StreamThread*)self);
3819    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
3820    record_parameters_t     *selfRecordParms =  &(selfThread->m_recordParameters);
3821    node_info_t             *currentNode    = &(selfStreamParms->node);
3822
3823    ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
3824
3825    if (currentSignal & SIGNAL_STREAM_CHANGE_PARAMETER) {
3826        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER", __FUNCTION__);
3827
3828        //Do something in Parent thread handler
3829        selfThread->applyChange();
3830
3831        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER DONE", __FUNCTION__);
3832    }
3833
3834    // Do something in Child thread handler
3835    // Should change function to class that inherited StreamThread class to support dynamic stream allocation
3836    if (selfStreamParms->streamType == STREAM_TYPE_DIRECT)
3837    {
3838        m_streamFunc0(self);
3839    }
3840    else if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT)
3841    {
3842        m_streamFunc1(self);
3843    }
3844
3845    if (currentSignal & SIGNAL_THREAD_RELEASE) {
3846        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3847        ALOGD("(%s):(%d) SIGNAL_THREAD_RELEASE", __FUNCTION__, selfStreamParms->streamType);
3848
3849        //Do something in Parent thread handler
3850
3851        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
3852
3853        return;
3854    }
3855
3856    return;
3857}
3858
3859bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h)
3860{
3861    int sizeOfSupportList;
3862
3863    //REAR Camera
3864    if(this->getCameraId() == 0) {
3865        sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int)*2);
3866
3867        for(int i = 0; i < sizeOfSupportList; i++) {
3868            if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h))
3869                return true;
3870        }
3871
3872    }
3873    else {
3874        sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int)*2);
3875
3876        for(int i = 0; i < sizeOfSupportList; i++) {
3877            if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h))
3878                return true;
3879        }
3880    }
3881
3882    return false;
3883}
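
// Encodes a YUV buffer into JPEG with ExynosJpegEncoderForCamera, attaching Exif
// data and a thumbnail when the request supplies a supported thumbnail size
// (otherwise the default rear-camera thumbnail size is used). Returns true on
// success; the encoder instance is destroyed on every exit path.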
3884bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
3885                            ExynosBuffer *jpegBuf,
3886                            ExynosRect *rect)
3887{
3888    unsigned char *addr;
3889
3890    ExynosJpegEncoderForCamera jpegEnc;
3891    bool ret = false;
3892    int res = 0;
3893
3894    unsigned int *yuvSize = yuvBuf->size.extS;
3895
3896    if (jpegEnc.create()) {
3897        ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
3898        goto jpeg_encode_done;
3899    }
3900
3901    if (jpegEnc.setQuality(100)) {
3902        ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
3903        goto jpeg_encode_done;
3904    }
3905
3906    if (jpegEnc.setSize(rect->w, rect->h)) {
3907        ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
3908        goto jpeg_encode_done;
3909    }
3910    ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
3911
3912    if (jpegEnc.setColorFormat(rect->colorFormat)) {
3913        ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
3914        goto jpeg_encode_done;
3915    }
3916
3917    if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
3918        ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
3919        goto jpeg_encode_done;
3920    }
3921
3922    if((m_jpegMetadata.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.ctl.jpeg.thumbnailSize[1] != 0)) {
3923        mExifInfo.enableThumb = true;
3924        if(!m_checkThumbnailSize(m_jpegMetadata.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.ctl.jpeg.thumbnailSize[1])) {
3925            //default value
3926            m_thumbNailW = SUPPORT_THUMBNAIL_REAR_SIZE[0][0];
3927            m_thumbNailH = SUPPORT_THUMBNAIL_REAR_SIZE[0][1];
3928        } else {
3929            m_thumbNailW = m_jpegMetadata.ctl.jpeg.thumbnailSize[0];
3930            m_thumbNailH = m_jpegMetadata.ctl.jpeg.thumbnailSize[1];
3931        }
3932
3933        ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH);
3934
3935    } else {
3936        mExifInfo.enableThumb = false;
3937    }
3938
3939    if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) {
3940        ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailW, m_thumbNailH);
3941        goto jpeg_encode_done;
3942    }
3943
3944    ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailH);
3945    if (jpegEnc.setThumbnailQuality(50)) {
3946        ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
3947        goto jpeg_encode_done;
3948    }
3949
3950    m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
3951    ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
3952    if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
3953        ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
3954        goto jpeg_encode_done;
3955    }
3956    if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
3957        ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
3958        goto jpeg_encode_done;
3959    }
3960
3961    if (jpegEnc.updateConfig()) {
3962        ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
3963        goto jpeg_encode_done;
3964    }
3965
3966    if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) != 0) {
3967        ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
3968        goto jpeg_encode_done;
3969    }
3970
3971    ret = true;
3972
3973jpeg_encode_done:
3974
3975    if (jpegEnc.flagCreate() == true)
3976        jpegEnc.destroy();
3977
3978    return ret;
3979}
3980
3981void ExynosCameraHWInterface2::OnAfTriggerStart(int id)
3982{
3983    m_afPendingTriggerId = id;
3984    m_afModeWaitingCnt = 3;
3985}
3986
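// Dispatches an AF trigger to the state-machine handler for the currently active AF mode.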
3987void ExynosCameraHWInterface2::OnAfTrigger(int id)
3988{
3989    m_afTriggerId = id;
3990
3991    switch (m_afMode) {
3992    case AA_AFMODE_AUTO:
3993    case AA_AFMODE_MACRO:
3994    case AA_AFMODE_OFF:
3995        OnAfTriggerAutoMacro(id);
3996        break;
3997    case AA_AFMODE_CONTINUOUS_VIDEO:
3998        OnAfTriggerCAFVideo(id);
3999        break;
4000    case AA_AFMODE_CONTINUOUS_PICTURE:
4001        OnAfTriggerCAFPicture(id);
4002        break;
4003
4004    default:
4005        break;
4006    }
4007}
4008
4009void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id)
4010{
4011    int nextState = NO_TRANSITION;
4012
4013    switch (m_afState) {
4014    case HAL_AFSTATE_INACTIVE:
4015        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4016        m_IsAfTriggerRequired = true;
4017        break;
4018    case HAL_AFSTATE_NEEDS_COMMAND:
4019        nextState = NO_TRANSITION;
4020        break;
4021    case HAL_AFSTATE_STARTED:
4022        nextState = NO_TRANSITION;
4023        break;
4024    case HAL_AFSTATE_SCANNING:
4025        nextState = NO_TRANSITION;
4026        break;
4027    case HAL_AFSTATE_LOCKED:
4028        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4029        m_IsAfTriggerRequired = true;
4030        break;
4031    case HAL_AFSTATE_FAILED:
4032        nextState = HAL_AFSTATE_NEEDS_COMMAND;
4033        m_IsAfTriggerRequired = true;
4034        break;
4035    default:
4036        break;
4037    }
4038    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4039    if (nextState != NO_TRANSITION)
4040        m_afState = nextState;
4041}
4042
4043void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
4044{
4045    int nextState = NO_TRANSITION;
4046
4047    switch (m_afState) {
4048    case HAL_AFSTATE_INACTIVE:
4049        nextState = HAL_AFSTATE_FAILED;
4050        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4051        break;
4052    case HAL_AFSTATE_NEEDS_COMMAND:
4053        // not used
4054        break;
4055    case HAL_AFSTATE_STARTED:
4056        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4057        m_AfHwStateFailed = false;
4058        break;
4059    case HAL_AFSTATE_SCANNING:
4060        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4061        m_AfHwStateFailed = false;
4062        break;
4063    case HAL_AFSTATE_NEEDS_DETERMINATION:
4064        nextState = NO_TRANSITION;
4065        break;
4066    case HAL_AFSTATE_PASSIVE_FOCUSED:
4067        m_IsAfLockRequired = true;
4068        if (m_AfHwStateFailed) {
4069            ALOGV("(%s): LAST : fail", __FUNCTION__);
4070            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4071            nextState = HAL_AFSTATE_FAILED;
4072        }
4073        else {
4074            ALOGV("(%s): LAST : success", __FUNCTION__);
4075            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4076            nextState = HAL_AFSTATE_LOCKED;
4077        }
4078        m_AfHwStateFailed = false;
4079        break;
4080    case HAL_AFSTATE_LOCKED:
4081        nextState = NO_TRANSITION;
4082        break;
4083    case HAL_AFSTATE_FAILED:
4084        nextState = NO_TRANSITION;
4085        break;
4086    default:
4087        break;
4088    }
4089    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4090    if (nextState != NO_TRANSITION)
4091        m_afState = nextState;
4092}
4093
4094
4095void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id)
4096{
4097    int nextState = NO_TRANSITION;
4098
4099    switch (m_afState) {
4100    case HAL_AFSTATE_INACTIVE:
4101        nextState = HAL_AFSTATE_FAILED;
4102        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4103        break;
4104    case HAL_AFSTATE_NEEDS_COMMAND:
4105        // not used
4106        break;
4107    case HAL_AFSTATE_STARTED:
4108        m_IsAfLockRequired = true;
4109        nextState = HAL_AFSTATE_FAILED;
4110        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4111        break;
4112    case HAL_AFSTATE_SCANNING:
4113        m_IsAfLockRequired = true;
4114        nextState = HAL_AFSTATE_FAILED;
4115        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4116        break;
4117    case HAL_AFSTATE_NEEDS_DETERMINATION:
4118        // not used
4119        break;
4120    case HAL_AFSTATE_PASSIVE_FOCUSED:
4121        m_IsAfLockRequired = true;
4122        SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4123        nextState = HAL_AFSTATE_LOCKED;
4124        break;
4125    case HAL_AFSTATE_LOCKED:
4126        nextState = NO_TRANSITION;
4127        break;
4128    case HAL_AFSTATE_FAILED:
4129        nextState = NO_TRANSITION;
4130        break;
4131    default:
4132        break;
4133    }
4134    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4135    if (nextState != NO_TRANSITION)
4136        m_afState = nextState;
4137}
4138
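// Routes ISP aa_afstate notifications to the handler for the active AF mode,
// which advances the HAL AF state machine and reports results to the service
// (no-op when the AF mode is OFF).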
4139void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
4140{
4141    switch (m_afMode) {
4142    case AA_AFMODE_AUTO:
4143    case AA_AFMODE_MACRO:
4144        OnAfNotificationAutoMacro(noti);
4145        break;
4146    case AA_AFMODE_CONTINUOUS_VIDEO:
4147        OnAfNotificationCAFVideo(noti);
4148        break;
4149    case AA_AFMODE_CONTINUOUS_PICTURE:
4150        OnAfNotificationCAFPicture(noti);
4151        break;
4152    case AA_AFMODE_OFF:
4153    default:
4154        break;
4155    }
4156}
4157
4158void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
4159{
4160    int nextState = NO_TRANSITION;
4161    bool bWrongTransition = false;
4162
4163    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
4164        switch (noti) {
4165        case AA_AFSTATE_INACTIVE:
4166        case AA_AFSTATE_ACTIVE_SCAN:
4167        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4168        case AA_AFSTATE_AF_FAILED_FOCUS:
4169        default:
4170            nextState = NO_TRANSITION;
4171            break;
4172        }
4173    }
4174    else if (m_afState == HAL_AFSTATE_STARTED) {
4175        switch (noti) {
4176        case AA_AFSTATE_INACTIVE:
4177            nextState = NO_TRANSITION;
4178            break;
4179        case AA_AFSTATE_ACTIVE_SCAN:
4180            nextState = HAL_AFSTATE_SCANNING;
4181            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
4182            break;
4183        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4184            nextState = NO_TRANSITION;
4185            break;
4186        case AA_AFSTATE_AF_FAILED_FOCUS:
4187            nextState = NO_TRANSITION;
4188            break;
4189        default:
4190            bWrongTransition = true;
4191            break;
4192        }
4193    }
4194    else if (m_afState == HAL_AFSTATE_SCANNING) {
4195        switch (noti) {
4196        case AA_AFSTATE_INACTIVE:
4197            bWrongTransition = true;
4198            break;
4199        case AA_AFSTATE_ACTIVE_SCAN:
4200            nextState = NO_TRANSITION;
4201            break;
4202        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4203            nextState = HAL_AFSTATE_LOCKED;
4204            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4205            break;
4206        case AA_AFSTATE_AF_FAILED_FOCUS:
4207            nextState = HAL_AFSTATE_FAILED;
4208            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4209            break;
4210        default:
4211            bWrongTransition = true;
4212            break;
4213        }
4214    }
4215    else if (m_afState == HAL_AFSTATE_LOCKED) {
4216        switch (noti) {
4217            case AA_AFSTATE_INACTIVE:
4218            case AA_AFSTATE_ACTIVE_SCAN:
4219                bWrongTransition = true;
4220                break;
4221            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4222                // Flash off if flash mode is available.
4223                if (m_afFlashEnableFlg)
4224                    m_afFlashCnt = IF_FLASH_AF_OFF;
4225                nextState = NO_TRANSITION;
4226                break;
4227            case AA_AFSTATE_AF_FAILED_FOCUS:
4228            default:
4229                bWrongTransition = true;
4230                break;
4231        }
4232    }
4233    else if (m_afState == HAL_AFSTATE_FAILED) {
4234        switch (noti) {
4235            case AA_AFSTATE_INACTIVE:
4236            case AA_AFSTATE_ACTIVE_SCAN:
4237            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4238                bWrongTransition = true;
4239                break;
4240            case AA_AFSTATE_AF_FAILED_FOCUS:
4241                // Flash off if flash mode is available.
4242                if (m_afFlashEnableFlg)
4243                    m_afFlashCnt = IF_FLASH_AF_OFF;
4244                nextState = NO_TRANSITION;
4245                break;
4246            default:
4247                bWrongTransition = true;
4248                break;
4249        }
4250    }
4251    if (bWrongTransition) {
4252        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
4253        return;
4254    }
4255    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
4256    if (nextState != NO_TRANSITION)
4257        m_afState = nextState;
4258}
4259
4260void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
4261{
4262    int nextState = NO_TRANSITION;
4263    bool bWrongTransition = false;
4264
4265    if (m_afState == HAL_AFSTATE_INACTIVE) {
4266        switch (noti) {
4267        case AA_AFSTATE_INACTIVE:
4268        case AA_AFSTATE_ACTIVE_SCAN:
4269        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4270        case AA_AFSTATE_AF_FAILED_FOCUS:
4271        default:
4272            nextState = NO_TRANSITION;
4273            break;
4274        }
4275    }
4276    else if (m_afState == HAL_AFSTATE_STARTED) {
4277        switch (noti) {
4278        case AA_AFSTATE_INACTIVE:
4279            nextState = NO_TRANSITION;
4280            break;
4281        case AA_AFSTATE_ACTIVE_SCAN:
4282            nextState = HAL_AFSTATE_SCANNING;
4283            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
4284            break;
4285        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4286            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4287            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4288            break;
4289        case AA_AFSTATE_AF_FAILED_FOCUS:
4290            //nextState = HAL_AFSTATE_FAILED;
4291            //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4292            nextState = NO_TRANSITION;
4293            break;
4294        default:
4295            bWrongTransition = true;
4296            break;
4297        }
4298    }
4299    else if (m_afState == HAL_AFSTATE_SCANNING) {
4300        switch (noti) {
4301        case AA_AFSTATE_INACTIVE:
4302            nextState = NO_TRANSITION;
4303            break;
4304        case AA_AFSTATE_ACTIVE_SCAN:
4305            nextState = NO_TRANSITION;
4306            m_AfHwStateFailed = false;
4307            break;
4308        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4309            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4310            m_AfHwStateFailed = false;
4311            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4312            break;
4313        case AA_AFSTATE_AF_FAILED_FOCUS:
4314            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4315            m_AfHwStateFailed = true;
4316            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4317            break;
4318        default:
4319            bWrongTransition = true;
4320            break;
4321        }
4322    }
4323    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
4324        switch (noti) {
4325        case AA_AFSTATE_INACTIVE:
4326            nextState = NO_TRANSITION;
4327            break;
4328        case AA_AFSTATE_ACTIVE_SCAN:
4329            nextState = HAL_AFSTATE_SCANNING;
4330            m_AfHwStateFailed = false;
4331            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
4332            break;
4333        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4334            nextState = NO_TRANSITION;
4335            m_AfHwStateFailed = false;
4336            break;
4337        case AA_AFSTATE_AF_FAILED_FOCUS:
4338            nextState = NO_TRANSITION;
4339            m_AfHwStateFailed = true;
4340            break;
4341        default:
4342            bWrongTransition = true;
4343            break;
4344        }
4345    }
4346    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
4347        switch (noti) {
4348        case AA_AFSTATE_INACTIVE:
4349            nextState = NO_TRANSITION;
4350            break;
4351        case AA_AFSTATE_ACTIVE_SCAN:
4352            nextState = NO_TRANSITION;
4353            break;
4354        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4355            m_IsAfLockRequired = true;
4356            nextState = HAL_AFSTATE_LOCKED;
4357            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4358            break;
4359        case AA_AFSTATE_AF_FAILED_FOCUS:
4360            m_IsAfLockRequired = true;
4361            nextState = HAL_AFSTATE_FAILED;
4362            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4363            break;
4364        default:
4365            bWrongTransition = true;
4366            break;
4367        }
4368    }
4369    else if (m_afState == HAL_AFSTATE_LOCKED) {
4370        switch (noti) {
4371            case AA_AFSTATE_INACTIVE:
4372                nextState = NO_TRANSITION;
4373                break;
4374            case AA_AFSTATE_ACTIVE_SCAN:
4375                bWrongTransition = true;
4376                break;
4377            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4378                nextState = NO_TRANSITION;
4379                break;
4380            case AA_AFSTATE_AF_FAILED_FOCUS:
4381            default:
4382                bWrongTransition = true;
4383                break;
4384        }
4385    }
4386    else if (m_afState == HAL_AFSTATE_FAILED) {
4387        switch (noti) {
4388            case AA_AFSTATE_INACTIVE:
4389                bWrongTransition = true;
4390                break;
4391            case AA_AFSTATE_ACTIVE_SCAN:
4392                nextState = HAL_AFSTATE_SCANNING;
4393                break;
4394            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4395                bWrongTransition = true;
4396                break;
4397            case AA_AFSTATE_AF_FAILED_FOCUS:
4398                nextState = NO_TRANSITION;
4399                break;
4400            default:
4401                bWrongTransition = true;
4402                break;
4403        }
4404    }
4405    if (bWrongTransition) {
4406        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
4407        return;
4408    }
4409    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
4410    if (nextState != NO_TRANSITION)
4411        m_afState = nextState;
4412}
4413
4414void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
4415{
4416    int nextState = NO_TRANSITION;
4417    bool bWrongTransition = false;
4418
4419    if (m_afState == HAL_AFSTATE_INACTIVE) {
4420        switch (noti) {
4421        case AA_AFSTATE_INACTIVE:
4422        case AA_AFSTATE_ACTIVE_SCAN:
4423        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4424        case AA_AFSTATE_AF_FAILED_FOCUS:
4425        default:
4426            nextState = NO_TRANSITION;
4427            break;
4428        }
4429    }
4430    else if (m_afState == HAL_AFSTATE_STARTED) {
4431        switch (noti) {
4432        case AA_AFSTATE_INACTIVE:
4433            nextState = NO_TRANSITION;
4434            break;
4435        case AA_AFSTATE_ACTIVE_SCAN:
4436            nextState = HAL_AFSTATE_SCANNING;
4437            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
4438            break;
4439        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4440            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4441            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4442            break;
4443        case AA_AFSTATE_AF_FAILED_FOCUS:
4444            nextState = HAL_AFSTATE_FAILED;
4445            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4446            break;
4447        default:
4448            bWrongTransition = true;
4449            break;
4450        }
4451    }
4452    else if (m_afState == HAL_AFSTATE_SCANNING) {
4453        switch (noti) {
4454        case AA_AFSTATE_INACTIVE:
4455            bWrongTransition = true;
4456            break;
4457        case AA_AFSTATE_ACTIVE_SCAN:
4458            nextState = NO_TRANSITION;
4459            break;
4460        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4461            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
4462            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
4463            break;
4464        case AA_AFSTATE_AF_FAILED_FOCUS:
4465            nextState = NO_TRANSITION;
4466            m_IsAfTriggerRequired = true;
4467            break;
4468        default:
4469            bWrongTransition = true;
4470            break;
4471        }
4472    }
4473    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
4474        switch (noti) {
4475        case AA_AFSTATE_INACTIVE:
4476            bWrongTransition = true;
4477            break;
4478        case AA_AFSTATE_ACTIVE_SCAN:
4479            nextState = HAL_AFSTATE_SCANNING;
4480            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
4481            break;
4482        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4483            nextState = NO_TRANSITION;
4484            break;
4485        case AA_AFSTATE_AF_FAILED_FOCUS:
4486            nextState = HAL_AFSTATE_FAILED;
4487            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4488            // TODO : needs NO_TRANSITION ?
4489            break;
4490        default:
4491            bWrongTransition = true;
4492            break;
4493        }
4494    }
4495    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
4496        switch (noti) {
4497        case AA_AFSTATE_INACTIVE:
4498            bWrongTransition = true;
4499            break;
4500        case AA_AFSTATE_ACTIVE_SCAN:
4501            nextState = NO_TRANSITION;
4502            break;
4503        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4504            m_IsAfLockRequired = true;
4505            nextState = HAL_AFSTATE_LOCKED;
4506            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4507            break;
4508        case AA_AFSTATE_AF_FAILED_FOCUS:
4509            nextState = HAL_AFSTATE_FAILED;
4510            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4511            break;
4512        default:
4513            bWrongTransition = true;
4514            break;
4515        }
4516    }
4517    else if (m_afState == HAL_AFSTATE_LOCKED) {
4518        switch (noti) {
4519            case AA_AFSTATE_INACTIVE:
4520                nextState = NO_TRANSITION;
4521                break;
4522            case AA_AFSTATE_ACTIVE_SCAN:
4523                bWrongTransition = true;
4524                break;
4525            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4526                nextState = NO_TRANSITION;
4527                break;
4528            case AA_AFSTATE_AF_FAILED_FOCUS:
4529            default:
4530                bWrongTransition = true;
4531                break;
4532        }
4533    }
4534    else if (m_afState == HAL_AFSTATE_FAILED) {
4535        switch (noti) {
4536            case AA_AFSTATE_INACTIVE:
4537            case AA_AFSTATE_ACTIVE_SCAN:
4538            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
4539                bWrongTransition = true;
4540                break;
4541            case AA_AFSTATE_AF_FAILED_FOCUS:
4542                nextState = NO_TRANSITION;
4543                break;
4544            default:
4545                bWrongTransition = true;
4546                break;
4547        }
4548    }
4549    if (bWrongTransition) {
4550        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
4551        return;
4552    }
4553    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
4554    if (nextState != NO_TRANSITION)
4555        m_afState = nextState;
4556}
4557
4558void ExynosCameraHWInterface2::OnAfCancel(int id)
4559{
4560    m_afTriggerId = id;
4561
4562    switch (m_afMode) {
4563    case AA_AFMODE_AUTO:
4564    case AA_AFMODE_MACRO:
4565    case AA_AFMODE_OFF:
4566        OnAfCancelAutoMacro(id);
4567        break;
4568    case AA_AFMODE_CONTINUOUS_VIDEO:
4569        OnAfCancelCAFVideo(id);
4570        break;
4571    case AA_AFMODE_CONTINUOUS_PICTURE:
4572        OnAfCancelCAFPicture(id);
4573        break;
4574    default:
4575        break;
4576    }
4577}
4578
4579void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id)
4580{
4581    int nextState = NO_TRANSITION;
4582    m_afTriggerId = id;
4583
4584    if (m_afFlashEnableFlg) {
4585        m_afFlashCnt = IF_FLASH_AF_OFF;
4586    }
4587    switch (m_afState) {
4588    case HAL_AFSTATE_INACTIVE:
4589        nextState = NO_TRANSITION;
4590        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
4591        break;
4592    case HAL_AFSTATE_NEEDS_COMMAND:
4593    case HAL_AFSTATE_STARTED:
4594    case HAL_AFSTATE_SCANNING:
4595    case HAL_AFSTATE_LOCKED:
4596    case HAL_AFSTATE_FAILED:
4597        SetAfMode(AA_AFMODE_OFF);
4598        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
4599        nextState = HAL_AFSTATE_INACTIVE;
4600        break;
4601    default:
4602        break;
4603    }
4604    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4605    if (nextState != NO_TRANSITION)
4606        m_afState = nextState;
4607}
4608
4609void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id)
4610{
4611    int nextState = NO_TRANSITION;
4612    m_afTriggerId = id;
4613
4614    switch (m_afState) {
4615    case HAL_AFSTATE_INACTIVE:
4616        nextState = NO_TRANSITION;
4617        break;
4618    case HAL_AFSTATE_NEEDS_COMMAND:
4619    case HAL_AFSTATE_STARTED:
4620    case HAL_AFSTATE_SCANNING:
4621    case HAL_AFSTATE_LOCKED:
4622    case HAL_AFSTATE_FAILED:
4623    case HAL_AFSTATE_NEEDS_DETERMINATION:
4624    case HAL_AFSTATE_PASSIVE_FOCUSED:
4625        SetAfMode(AA_AFMODE_OFF);
4626        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
4627        SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
4628        nextState = HAL_AFSTATE_INACTIVE;
4629        break;
4630    default:
4631        break;
4632    }
4633    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4634    if (nextState != NO_TRANSITION)
4635        m_afState = nextState;
4636}
4637
4638void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id)
4639{
4640    int nextState = NO_TRANSITION;
4641    m_afTriggerId = id;
4642
4643    switch (m_afState) {
4644    case HAL_AFSTATE_INACTIVE:
4645        nextState = NO_TRANSITION;
4646        break;
4647    case HAL_AFSTATE_NEEDS_COMMAND:
4648    case HAL_AFSTATE_STARTED:
4649    case HAL_AFSTATE_SCANNING:
4650    case HAL_AFSTATE_LOCKED:
4651    case HAL_AFSTATE_FAILED:
4652    case HAL_AFSTATE_NEEDS_DETERMINATION:
4653    case HAL_AFSTATE_PASSIVE_FOCUSED:
4654        SetAfMode(AA_AFMODE_OFF);
4655        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
4656        SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
4657        nextState = HAL_AFSTATE_INACTIVE;
4658        break;
4659    default:
4660        break;
4661    }
4662    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4663    if (nextState != NO_TRANSITION)
4664        m_afState = nextState;
4665}
4666
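// Reports the AF state to the framework via the notify callback; duplicate
// states are suppressed except for INACTIVE (0), which is always forwarded.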
4667void ExynosCameraHWInterface2::SetAfStateForService(int newState)
4668{
4669    if (m_serviceAfState != newState || newState == 0)
4670        m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
4671    m_serviceAfState = newState;
4672}
4673
4674int ExynosCameraHWInterface2::GetAfStateForService()
4675{
4676   return m_serviceAfState;
4677}
4678
4679void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
4680{
4681    if (m_afMode != afMode) {
4682        if (m_IsAfModeUpdateRequired) {
4683            m_afMode2 = afMode;
4684            ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
4685        }
4686        else {
4687            ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
4688            m_IsAfModeUpdateRequired = true;
4689            m_afMode = afMode;
4690            if (m_afModeWaitingCnt != 0) {
4691                m_afModeWaitingCnt = 0;
4692                OnAfTrigger(m_afPendingTriggerId);
4693            }
4694        }
4695    }
4696}
4697
4698void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
4699{
4700    char property[PROPERTY_VALUE_MAX];
4701
4702    //2 0th IFD TIFF Tags
4703#if 0 // STOPSHIP TODO(aray): remove before launch, but for now don't leak product data
4704    //3 Maker
4705    property_get("ro.product.brand", property, EXIF_DEF_MAKER);
4706    strncpy((char *)mExifInfo.maker, property,
4707                sizeof(mExifInfo.maker) - 1);
4708    mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
4709    //3 Model
4710    property_get("ro.product.model", property, EXIF_DEF_MODEL);
4711    strncpy((char *)mExifInfo.model, property,
4712                sizeof(mExifInfo.model) - 1);
4713    mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
4714    //3 Software
4715    property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
4716    strncpy((char *)mExifInfo.software, property,
4717                sizeof(mExifInfo.software) - 1);
4718    mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
4719#endif
4720
4721    //3 YCbCr Positioning
4722    mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
4723
4724    //2 0th IFD Exif Private Tags
4725    //3 F Number
4726    mExifInfo.fnumber.num = EXIF_DEF_FNUMBER_NUM;
4727    mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
4728    //3 Exposure Program
4729    mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
4730    //3 Exif Version
4731    memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
4732    //3 Aperture
4733    uint32_t av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
4734    mExifInfo.aperture.num = av*EXIF_DEF_APEX_DEN;
4735    mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
4736    //3 Maximum lens aperture
4737    mExifInfo.max_aperture.num = mExifInfo.aperture.num;
4738    mExifInfo.max_aperture.den = mExifInfo.aperture.den;
4739    //3 Lens Focal Length
4740    mExifInfo.focal_length.num = EXIF_DEF_FOCAL_LEN_NUM;
4741    mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
4742    //3 User Comments
4743    strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
4744    //3 Color Space information
4745    mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
4746    //3 Exposure Mode
4747    mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
4748
4749    //2 0th IFD GPS Info Tags
4750    unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
4751    memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
4752
4753    //2 1st IFD TIFF Tags
4754    mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
4755    mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
4756    mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
4757    mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
4758    mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
4759    mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
4760}
4761
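// Fills the per-capture Exif fields (dimensions, orientation, date/time,
// exposure, flash, white balance, scene type, GPS and thumbnail size) from the
// current shot's control and dynamic metadata.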
4762void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
4763        camera2_shot *currentEntry)
4764{
4765    camera2_dm *dm = &(currentEntry->dm);
4766    camera2_ctl *ctl = &(currentEntry->ctl);
4767
4768    ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime, dm->aa.isoValue);
4769    if (!ctl->request.frameCount)
4770       return;
4771    //2 0th IFD TIFF Tags
4772    //3 Width
4773    exifInfo->width = rect->w;
4774    //3 Height
4775    exifInfo->height = rect->h;
4776    //3 Orientation
4777    switch (ctl->jpeg.orientation) {
4778    case 90:
4779        exifInfo->orientation = EXIF_ORIENTATION_90;
4780        break;
4781    case 180:
4782        exifInfo->orientation = EXIF_ORIENTATION_180;
4783        break;
4784    case 270:
4785        exifInfo->orientation = EXIF_ORIENTATION_270;
4786        break;
4787    case 0:
4788    default:
4789        exifInfo->orientation = EXIF_ORIENTATION_UP;
4790        break;
4791    }
4792
4793    //3 Date time
4794    time_t rawtime;
4795    struct tm *timeinfo;
4796    time(&rawtime);
4797    timeinfo = localtime(&rawtime);
4798    strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
4799
4800    //2 0th IFD Exif Private Tags
4801    //3 Exposure Time
4802    int shutterSpeed = (dm->sensor.exposureTime/1000);
4803
4804    if (shutterSpeed <= 0) { // include 0 to avoid a divide-by-zero in the denominator below
4805        shutterSpeed = 100;
4806    }
4807
4808    exifInfo->exposure_time.num = 1;
4809    // x us -> 1/x s
4810    //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
4811    exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
4812
4813    //3 ISO Speed Rating
4814    exifInfo->iso_speed_rating = dm->aa.isoValue;
4815
4816    uint32_t av, tv, bv, sv, ev;
4817    av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
4818    tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
4819    sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
4820    bv = av + tv - sv;
4821    ev = av + tv;
4822    //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
4823    ALOGD("AV=%d, TV=%d, SV=%d", av, tv, sv);
4824
4825    //3 Shutter Speed
4826    exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
4827    exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
4828    //3 Brightness
4829    exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
4830    exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
4831    //3 Exposure Bias
4832    if (ctl->aa.sceneMode == AA_SCENE_MODE_BEACH ||
4833        ctl->aa.sceneMode == AA_SCENE_MODE_SNOW) {
4834        exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
4835        exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
4836    } else {
4837        exifInfo->exposure_bias.num = 0;
4838        exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN; // avoid an invalid 0/0 rational for zero bias
4839    }
4840    //3 Metering Mode
4841    /*switch (m_curCameraInfo->metering) {
4842    case METERING_MODE_CENTER:
4843        exifInfo->metering_mode = EXIF_METERING_CENTER;
4844        break;
4845    case METERING_MODE_MATRIX:
4846        exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
4847        break;
4848    case METERING_MODE_SPOT:
4849        exifInfo->metering_mode = EXIF_METERING_SPOT;
4850        break;
4851    case METERING_MODE_AVERAGE:
4852    default:
4853        exifInfo->metering_mode = EXIF_METERING_AVERAGE;
4854        break;
4855    }*/
4856    exifInfo->metering_mode = EXIF_METERING_CENTER;
4857
4858    //3 Flash
4859    int flash = dm->flash.flashMode;
4860    if (dm->flash.flashMode == FLASH_MODE_OFF || flash < 0)
4861        exifInfo->flash = EXIF_DEF_FLASH;
4862    else
4863        exifInfo->flash = flash;
4864
4865    //3 White Balance
4866    if (dm->aa.awbMode == AA_AWBMODE_WB_AUTO)
4867        exifInfo->white_balance = EXIF_WB_AUTO;
4868    else
4869        exifInfo->white_balance = EXIF_WB_MANUAL;
4870
4871    //3 Scene Capture Type
4872    switch (ctl->aa.sceneMode) {
4873    case AA_SCENE_MODE_PORTRAIT:
4874        exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
4875        break;
4876    case AA_SCENE_MODE_LANDSCAPE:
4877        exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
4878        break;
4879    case AA_SCENE_MODE_NIGHT_PORTRAIT:
4880        exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
4881        break;
4882    default:
4883        exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
4884        break;
4885    }
4886
4887    //2 0th IFD GPS Info Tags
4888    if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
4889
4890        if (ctl->jpeg.gpsCoordinates[0] > 0)
4891            strcpy((char *)exifInfo->gps_latitude_ref, "N");
4892        else
4893            strcpy((char *)exifInfo->gps_latitude_ref, "S");
4894
4895        if (ctl->jpeg.gpsCoordinates[1] > 0)
4896            strcpy((char *)exifInfo->gps_longitude_ref, "E");
4897        else
4898            strcpy((char *)exifInfo->gps_longitude_ref, "W");
4899
4900        if (ctl->jpeg.gpsCoordinates[2] > 0)
4901            exifInfo->gps_altitude_ref = 0;
4902        else
4903            exifInfo->gps_altitude_ref = 1;
4904
4905        double latitude = fabs(ctl->jpeg.gpsCoordinates[0] / 10000.0);
4906        double longitude = fabs(ctl->jpeg.gpsCoordinates[1] / 10000.0);
4907        double altitude = fabs(ctl->jpeg.gpsCoordinates[2] / 100.0);
4908
4909        exifInfo->gps_latitude[0].num = (uint32_t)latitude;
4910        exifInfo->gps_latitude[0].den = 1;
4911        exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
4912        exifInfo->gps_latitude[1].den = 1;
4913        exifInfo->gps_latitude[2].num = (uint32_t)((((latitude - exifInfo->gps_latitude[0].num) * 60)
4914                                        - exifInfo->gps_latitude[1].num) * 60);
4915        exifInfo->gps_latitude[2].den = 1;
4916
4917        exifInfo->gps_longitude[0].num = (uint32_t)longitude;
4918        exifInfo->gps_longitude[0].den = 1;
4919        exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
4920        exifInfo->gps_longitude[1].den = 1;
4921        exifInfo->gps_longitude[2].num = (uint32_t)((((longitude - exifInfo->gps_longitude[0].num) * 60)
4922                                        - exifInfo->gps_longitude[1].num) * 60);
4923        exifInfo->gps_longitude[2].den = 1;
4924
4925        exifInfo->gps_altitude.num = (uint32_t)altitude;
4926        exifInfo->gps_altitude.den = 1;
4927
4928        struct tm tm_data;
4929        long timestamp;
4930        timestamp = (long)ctl->jpeg.gpsTimestamp;
4931        gmtime_r(&timestamp, &tm_data);
4932        exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
4933        exifInfo->gps_timestamp[0].den = 1;
4934        exifInfo->gps_timestamp[1].num = tm_data.tm_min;
4935        exifInfo->gps_timestamp[1].den = 1;
4936        exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
4937        exifInfo->gps_timestamp[2].den = 1;
4938        snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
4939                "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
4940
4941        exifInfo->enableGps = true;
4942    } else {
4943        exifInfo->enableGps = false;
4944    }
4945
4946    //2 1st IFD TIFF Tags
4947    exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
4948    exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
4949}
4950
4951ExynosCameraHWInterface2::MainThread::~MainThread()
4952{
4953    ALOGV("(%s):", __FUNCTION__);
4954}
4955
4956void ExynosCameraHWInterface2::MainThread::release()
4957{
4958    ALOGV("(%s):", __func__);
4959    SetSignal(SIGNAL_THREAD_RELEASE);
4960}
4961
4962ExynosCameraHWInterface2::SensorThread::~SensorThread()
4963{
4964    ALOGV("(%s):", __FUNCTION__);
4965}
4966
4967void ExynosCameraHWInterface2::SensorThread::release()
4968{
4969    ALOGV("(%s):", __func__);
4970    SetSignal(SIGNAL_THREAD_RELEASE);
4971}
4972
4973ExynosCameraHWInterface2::IspThread::~IspThread()
4974{
4975    ALOGV("(%s):", __FUNCTION__);
4976}
4977
4978void ExynosCameraHWInterface2::IspThread::release()
4979{
4980    ALOGV("(%s):", __func__);
4981    SetSignal(SIGNAL_THREAD_RELEASE);
4982}
4983
4984ExynosCameraHWInterface2::StreamThread::~StreamThread()
4985{
4986    ALOGV("(%s):", __FUNCTION__);
4987}
4988
4989void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
4990{
4991    ALOGV("DEBUG(%s):", __FUNCTION__);
4992
4993    m_tempParameters = new_parameters;
4994
4995    SetSignal(SIGNAL_STREAM_CHANGE_PARAMETER);
4996
4997    // TODO : return synchronously (after setting parameters asynchronously)
4998    usleep(2000);
4999}
5000
5001void ExynosCameraHWInterface2::StreamThread::applyChange()
5002{
5003    memcpy(&m_parameters, m_tempParameters, sizeof(stream_parameters_t));
5004
5005    ALOGV("DEBUG(%s): Applying stream parameters width(%d), height(%d)",
5006            __FUNCTION__, m_parameters.outputWidth, m_parameters.outputHeight);
5007}
5008
5009void ExynosCameraHWInterface2::StreamThread::release()
5010{
5011    ALOGV("(%s):", __func__);
5012    SetSignal(SIGNAL_THREAD_RELEASE);
5013}
5014
5015int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
5016{
5017    int index;
5018    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
5019        if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
5020            return index;
5021    }
5022    return -1;
5023}
5024
5025void ExynosCameraHWInterface2::StreamThread::setRecordingParameter(record_parameters_t * recordParm)
5026{
5027    memcpy(&m_recordParameters, recordParm, sizeof(record_parameters_t));
5028}
5029
5030void ExynosCameraHWInterface2::StreamThread::setCallbackParameter(callback_parameters_t * callbackParm)
5031{
5032    memcpy(&m_previewCbParameters, callbackParm, sizeof(callback_parameters_t));
5033}
5034
5035int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
5036{
5037    if (ionClient == 0) {
5038        ionClient = ion_client_create();
5039        if (ionClient < 0) {
5040            ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
5041            return 0;
5042        }
5043    }
5044
5045    return ionClient;
5046}
5047
5048int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
5049{
5050    if (ionClient != 0) {
5051        if (ionClient > 0) {
5052            ion_client_destroy(ionClient);
5053        }
5054        ionClient = 0;
5055    }
5056
5057    return ionClient;
5058}
5059
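// Allocates and maps up to iMemoryNum planes from the ION heap; on any failure
// the partially allocated planes are released via freeCameraMemory().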
5060int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
5061{
5062    int ret = 0;
5063    int i = 0;
5064
5065    if (ionClient == 0) {
5066        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
5067        return -1;
5068    }
5069
5070    for (i=0;i<iMemoryNum;i++) {
5071        if (buf->size.extS[i] == 0) {
5072            break;
5073        }
5074
5075        buf->fd.extFd[i] = ion_alloc(ionClient, \
5076                                      buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK,0);
5077        if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
5078            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
5079            buf->fd.extFd[i] = -1;
5080            freeCameraMemory(buf, iMemoryNum);
5081            return -1;
5082        }
5083
5084        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
5085                                        buf->size.extS[i], 0);
5086        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
5087            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
5088            buf->virt.extP[i] = (char *)MAP_FAILED;
5089            freeCameraMemory(buf, iMemoryNum);
5090            return -1;
5091        }
5092        ALOGV("allocCameraMem : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
5093    }
5094
5095    return ret;
5096}
5097
5098void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
5099{
5100
5101    int i = 0;
5102    int ret = 0;
5103
5104    for (i=0;i<iMemoryNum;i++) {
5105        if (buf->fd.extFd[i] != -1) {
5106            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
5107                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
5108                if (ret < 0)
5109                    ALOGE("ERR(%s)", __FUNCTION__);
5110            }
5111            ion_free(buf->fd.extFd[i]);
5112        }
5113        buf->fd.extFd[i] = -1;
5114        buf->virt.extP[i] = (char *)MAP_FAILED;
5115        buf->size.extS[i] = 0;
5116    }
5117}
5118
5119void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
5120{
5121    int i = 0;
5122    for (i=0;i<iMemoryNum;i++) {
5123        buf->virt.extP[i] = (char *)MAP_FAILED;
5124        buf->fd.extFd[i] = -1;
5125        buf->size.extS[i] = 0;
5126    }
5127}
5128
5129
5130
5131
5132static camera2_device_t *g_cam2_device = NULL;
5133static bool g_camera_vaild = false;
5134ExynosCamera2 * g_camera2[2] = { NULL, NULL };
5135
5136static int HAL2_camera_device_close(struct hw_device_t* device)
5137{
5138    ALOGV("%s: ENTER", __FUNCTION__);
5139    if (device) {
5140
5141        camera2_device_t *cam_device = (camera2_device_t *)device;
5142        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
5143        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
5144        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
5145        g_cam2_device = NULL;
5146        free(cam_device);
5147        g_camera_vaild = false;
5148    }
5149
5150    ALOGV("%s: EXIT", __FUNCTION__);
5151    return 0;
5152}
5153
5154static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
5155{
5156    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
5157}
5158
5159static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
5160            const camera2_request_queue_src_ops_t *request_src_ops)
5161{
5162    ALOGV("DEBUG(%s):", __FUNCTION__);
5163    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
5164}
5165
5166static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
5167{
5168    ALOGV("DEBUG(%s):", __FUNCTION__);
5169    return obj(dev)->notifyRequestQueueNotEmpty();
5170}
5171
5172static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
5173            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
5174{
5175    ALOGV("DEBUG(%s):", __FUNCTION__);
5176    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
5177}
5178
5179static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
5180{
5181    ALOGV("DEBUG(%s):", __FUNCTION__);
5182    return obj(dev)->getInProgressCount();
5183}
5184
5185static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
5186{
5187    ALOGV("DEBUG(%s):", __FUNCTION__);
5188    return obj(dev)->flushCapturesInProgress();
5189}
5190
5191static int HAL2_device_construct_default_request(const struct camera2_device *dev,
5192            int request_template, camera_metadata_t **request)
5193{
5194    ALOGV("DEBUG(%s):", __FUNCTION__);
5195    return obj(dev)->constructDefaultRequest(request_template, request);
5196}
5197
5198static int HAL2_device_allocate_stream(
5199            const struct camera2_device *dev,
5200            // inputs
5201            uint32_t width,
5202            uint32_t height,
5203            int      format,
5204            const camera2_stream_ops_t *stream_ops,
5205            // outputs
5206            uint32_t *stream_id,
5207            uint32_t *format_actual,
5208            uint32_t *usage,
5209            uint32_t *max_buffers)
5210{
5211    ALOGV("(%s): ", __FUNCTION__);
5212    return obj(dev)->allocateStream(width, height, format, stream_ops,
5213                                    stream_id, format_actual, usage, max_buffers);
5214}
5215
5216
5217static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
5218            uint32_t stream_id,
5219            int num_buffers,
5220            buffer_handle_t *buffers)
5221{
5222    ALOGV("DEBUG(%s):", __FUNCTION__);
5223    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
5224}
5225
5226static int HAL2_device_release_stream(
5227        const struct camera2_device *dev,
5228            uint32_t stream_id)
5229{
5230    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
5231    if (!g_camera_vaild)
5232        return 0;
5233    return obj(dev)->releaseStream(stream_id);
5234}
5235
5236static int HAL2_device_allocate_reprocess_stream(
5237           const struct camera2_device *dev,
5238            uint32_t width,
5239            uint32_t height,
5240            uint32_t format,
5241            const camera2_stream_in_ops_t *reprocess_stream_ops,
5242            // outputs
5243            uint32_t *stream_id,
5244            uint32_t *consumer_usage,
5245            uint32_t *max_buffers)
5246{
5247    ALOGV("DEBUG(%s):", __FUNCTION__);
5248    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
5249                                    stream_id, consumer_usage, max_buffers);
5250}
5251
5252static int HAL2_device_release_reprocess_stream(
5253        const struct camera2_device *dev,
5254            uint32_t stream_id)
5255{
5256    ALOGV("DEBUG(%s):", __FUNCTION__);
5257    return obj(dev)->releaseReprocessStream(stream_id);
5258}
5259
5260static int HAL2_device_trigger_action(const struct camera2_device *dev,
5261           uint32_t trigger_id,
5262            int ext1,
5263            int ext2)
5264{
5265    ALOGV("DEBUG(%s):", __FUNCTION__);
5266    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
5267}
5268
5269static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
5270            camera2_notify_callback notify_cb,
5271            void *user)
5272{
5273    ALOGV("DEBUG(%s):", __FUNCTION__);
5274    return obj(dev)->setNotifyCallback(notify_cb, user);
5275}
5276
5277static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
5278            vendor_tag_query_ops_t **ops)
5279{
5280    ALOGV("DEBUG(%s):", __FUNCTION__);
5281    return obj(dev)->getMetadataVendorTagOps(ops);
5282}
5283
5284static int HAL2_device_dump(const struct camera2_device *dev, int fd)
5285{
5286    ALOGV("DEBUG(%s):", __FUNCTION__);
5287    return obj(dev)->dump(fd);
5288}
5289
5290
5291
5292
5293
5294static int HAL2_getNumberOfCameras()
5295{
5296    ALOGV("(%s): returning 2", __FUNCTION__);
5297    return 2;
5298}
5299
5300
5301static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
5302{
5303    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
5304    static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};
5305
5306    status_t res;
5307
5308    if (cameraId == 0) {
5309        info->facing = CAMERA_FACING_BACK;
5310        if (!g_camera2[0])
5311            g_camera2[0] = new ExynosCamera2(0);
5312    }
5313    else if (cameraId == 1) {
5314        info->facing = CAMERA_FACING_FRONT;
5315        if (!g_camera2[1])
5316            g_camera2[1] = new ExynosCamera2(1);
5317    }
5318    else
5319        return BAD_VALUE;
5320
5321    info->orientation = 0;
5322    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
5323    if (mCameraInfo[cameraId] == NULL) {
5324        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
5325        if (res != OK) {
5326            ALOGE("%s: Unable to allocate static info: %s (%d)",
5327                    __FUNCTION__, strerror(-res), res);
5328            return res;
5329        }
5330        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
5331        if (res != OK) {
5332            ALOGE("%s: Unable to fill in static info: %s (%d)",
5333                    __FUNCTION__, strerror(-res), res);
5334            return res;
5335        }
5336    }
5337    info->static_camera_characteristics = mCameraInfo[cameraId];
5338    return NO_ERROR;
5339}
5340
5341#define SET_METHOD(m) m : HAL2_device_##m
5342
5343static camera2_device_ops_t camera2_device_ops = {
5344        SET_METHOD(set_request_queue_src_ops),
5345        SET_METHOD(notify_request_queue_not_empty),
5346        SET_METHOD(set_frame_queue_dst_ops),
5347        SET_METHOD(get_in_progress_count),
5348        SET_METHOD(flush_captures_in_progress),
5349        SET_METHOD(construct_default_request),
5350        SET_METHOD(allocate_stream),
5351        SET_METHOD(register_stream_buffers),
5352        SET_METHOD(release_stream),
5353        SET_METHOD(allocate_reprocess_stream),
5354        SET_METHOD(release_reprocess_stream),
5355        SET_METHOD(trigger_action),
5356        SET_METHOD(set_notify_callback),
5357        SET_METHOD(get_metadata_vendor_tag_ops),
5358        SET_METHOD(dump),
5359};
5360
5361#undef SET_METHOD
5362
5363
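// Opens the camera2 device: reuses an existing instance for the same camera ID,
// waits for an instance with a different ID to close, then allocates the
// hw_device_t wrapper and constructs the HAL object.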
5364static int HAL2_camera_device_open(const struct hw_module_t* module,
5365                                  const char *id,
5366                                  struct hw_device_t** device)
5367{
5368
5369
5370    int cameraId = atoi(id);
5371    int openInvalid = 0;
5372
5373    g_camera_vaild = false;
5374    ALOGV("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
5375    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
5376        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
5377        return -EINVAL;
5378    }
5379
5380    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
5381    if (g_cam2_device) {
5382        if (obj(g_cam2_device)->getCameraId() == cameraId) {
5383            ALOGV("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
5384            goto done;
5385        } else {
5386
5387            while (g_cam2_device)
5388                usleep(10000);
5389        }
5390    }
5391
5392    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
5393    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
5394
5395    if (!g_cam2_device)
5396        return -ENOMEM;
5397
5398    g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
5399    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
5400    g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
5401    g_cam2_device->common.close   = HAL2_camera_device_close;
5402
5403    g_cam2_device->ops = &camera2_device_ops;
5404
5405    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);
5406
5407    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
5408    if (!openInvalid) {
5409        ALOGE("ERR(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
5410        return -ENODEV;
5411    }
5412done:
5413    *device = (hw_device_t *)g_cam2_device;
5414    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
5415    g_camera_vaild = true;
5416
5417    return 0;
5418}
5419
5420
5421static hw_module_methods_t camera_module_methods = {
5422            open : HAL2_camera_device_open
5423};
5424
5425extern "C" {
5426    struct camera_module HAL_MODULE_INFO_SYM = {
5427      common : {
5428          tag                : HARDWARE_MODULE_TAG,
5429          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
5430          hal_api_version    : HARDWARE_HAL_API_VERSION,
5431          id                 : CAMERA_HARDWARE_MODULE_ID,
5432          name               : "Exynos Camera HAL2",
5433          author             : "Samsung Corporation",
5434          methods            : &camera_module_methods,
5435          dso:                NULL,
5436          reserved:           {0},
5437      },
5438      get_number_of_cameras : HAL2_getNumberOfCameras,
5439      get_camera_info       : HAL2_getCameraInfo
5440    };
5441}
5442
5443}; // namespace android
5444