ExynosCameraHWInterface2.cpp revision eed7ed1bffb083b112a3366e740ebdc186203afa
1/*
2**
3** Copyright 2008, The Android Open Source Project
4** Copyright 2012, Samsung Electronics Co. LTD
5**
6** Licensed under the Apache License, Version 2.0 (the "License");
7** you may not use this file except in compliance with the License.
8** You may obtain a copy of the License at
9**
10**     http://www.apache.org/licenses/LICENSE-2.0
11**
12** Unless required by applicable law or agreed to in writing, software
13** distributed under the License is distributed on an "AS IS" BASIS,
14** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15** See the License for the specific language governing permissions and
16** limitations under the License.
17*/
18
19/*!
20 * \file      ExynosCameraHWInterface2.cpp
21 * \brief     source file for Android Camera API 2.0 HAL
22 * \author    Sungjoong Kang(sj3.kang@samsung.com)
23 * \date      2012/07/10
24 *
25 * <b>Revision History: </b>
26 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
27 *   Initial Release
28 *
29 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
30 *   2nd Release
31 *
32 */
33
34//#define LOG_NDEBUG 0
35#define LOG_TAG "ExynosCameraHAL2"
36#include <utils/Log.h>
37
38#include "ExynosCameraHWInterface2.h"
39#include "exynos_format.h"
40
41
42
43namespace android {
44
45
46void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
47{
48    int nw;
49    int cnt = 0;
50    uint32_t written = 0;
51
52    ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
53    int fd = open(fname, O_RDWR | O_CREAT, 0644);
54    if (fd < 0) {
55        ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
56        return;
57    }
58
59    ALOGV("writing %d bytes to file [%s]", size, fname);
60    while (written < size) {
61        nw = ::write(fd, buf + written, size - written);
62        if (nw < 0) {
63            ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
64            break;
65        }
66        written += nw;
67        cnt++;
68    }
69    ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
70    ::close(fd);
71}
72
73int get_pixel_depth(uint32_t fmt)
74{
75    int depth = 0;
76
77    switch (fmt) {
78    case V4L2_PIX_FMT_JPEG:
79        depth = 8;
80        break;
81
82    case V4L2_PIX_FMT_NV12:
83    case V4L2_PIX_FMT_NV21:
84    case V4L2_PIX_FMT_YUV420:
85    case V4L2_PIX_FMT_YVU420M:
86    case V4L2_PIX_FMT_NV12M:
87    case V4L2_PIX_FMT_NV12MT:
88        depth = 12;
89        break;
90
91    case V4L2_PIX_FMT_RGB565:
92    case V4L2_PIX_FMT_YUYV:
93    case V4L2_PIX_FMT_YVYU:
94    case V4L2_PIX_FMT_UYVY:
95    case V4L2_PIX_FMT_VYUY:
96    case V4L2_PIX_FMT_NV16:
97    case V4L2_PIX_FMT_NV61:
98    case V4L2_PIX_FMT_YUV422P:
99    case V4L2_PIX_FMT_SBGGR10:
100    case V4L2_PIX_FMT_SBGGR12:
101    case V4L2_PIX_FMT_SBGGR16:
102        depth = 16;
103        break;
104
105    case V4L2_PIX_FMT_RGB32:
106        depth = 32;
107        break;
108    default:
109        ALOGE("Get depth failed(format : %d)", fmt);
110        break;
111    }
112
113    return depth;
114}
115
116int cam_int_s_fmt(node_info_t *node)
117{
118    struct v4l2_format v4l2_fmt;
119    unsigned int framesize;
120    int ret;
121
122    memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
123
124    v4l2_fmt.type = node->type;
125    framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
126
127    if (node->planes >= 1) {
128        v4l2_fmt.fmt.pix_mp.width       = node->width;
129        v4l2_fmt.fmt.pix_mp.height      = node->height;
130        v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
131        v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
132    } else {
133        ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__);
134    }
135
136    /* Set up for capture */
137    ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
138
139    if (ret < 0)
140        ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
141
142
143    return ret;
144}
145
146int cam_int_reqbufs(node_info_t *node)
147{
148    struct v4l2_requestbuffers req;
149    int ret;
150
151    req.count = node->buffers;
152    req.type = node->type;
153    req.memory = node->memory;
154
155    ret = exynos_v4l2_reqbufs(node->fd, &req);
156
157    if (ret < 0)
158        ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
159
160    return req.count;
161}
162
163int cam_int_qbuf(node_info_t *node, int index)
164{
165    struct v4l2_buffer v4l2_buf;
166    struct v4l2_plane planes[VIDEO_MAX_PLANES];
167    int i;
168    int ret = 0;
169
170    v4l2_buf.m.planes   = planes;
171    v4l2_buf.type       = node->type;
172    v4l2_buf.memory     = node->memory;
173    v4l2_buf.index      = index;
174    v4l2_buf.length     = node->planes;
175
176    for(i = 0; i < node->planes; i++){
177        v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
178        v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
179    }
180
181    ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
182
183    if (ret < 0)
184        ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
185
186    return ret;
187}
188
189int cam_int_streamon(node_info_t *node)
190{
191    enum v4l2_buf_type type = node->type;
192    int ret;
193
194
195    ret = exynos_v4l2_streamon(node->fd, type);
196
197    if (ret < 0)
198        ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
199
200    ALOGV("On streaming I/O... ... fd(%d)", node->fd);
201
202    return ret;
203}
204
/*
 * Stop streaming I/O on a capture node (VIDIOC_STREAMOFF).
 *
 * NOTE(review): unlike cam_int_streamon(), the buffer type here is
 * hard-coded to V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE rather than taken from
 * node->type. This appears deliberate — output-type nodes go through
 * isp_int_streamoff() below — but confirm before calling this on a node
 * of any other type.
 *
 * Returns the ioctl result (negative on failure).
 */
int cam_int_streamoff(node_info_t *node)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    int ret;


    ALOGV("Off streaming I/O... fd(%d)", node->fd);
    ret = exynos_v4l2_streamoff(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);

    return ret;
}
219
220int isp_int_streamoff(node_info_t *node)
221{
222    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
223    int ret;
224
225    ALOGV("Off streaming I/O... fd(%d)", node->fd);
226    ret = exynos_v4l2_streamoff(node->fd, type);
227
228    if (ret < 0)
229        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
230
231    return ret;
232}
233
234int cam_int_dqbuf(node_info_t *node)
235{
236    struct v4l2_buffer v4l2_buf;
237    struct v4l2_plane planes[VIDEO_MAX_PLANES];
238    int ret;
239
240    v4l2_buf.type       = node->type;
241    v4l2_buf.memory     = node->memory;
242    v4l2_buf.m.planes   = planes;
243    v4l2_buf.length     = node->planes;
244
245    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
246    if (ret < 0)
247        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
248
249    return v4l2_buf.index;
250}
251
252int cam_int_s_input(node_info_t *node, int index)
253{
254    int ret;
255
256    ret = exynos_v4l2_s_input(node->fd, index);
257    if (ret < 0)
258        ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
259
260    return ret;
261}
262
263
/* Definition of the class-wide gralloc HAL handle shared by all HAL
 * instances; loaded on first construction (see the ExynosCameraHWInterface2
 * constructor below). */
gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
265
/*
 * RequestManager constructor.
 *
 * Initializes the circular request-entry buffer in an all-EMPTY state:
 * the three cursor indices start at -1 (nothing inserted / processing /
 * output yet) and every entry is zeroed with frameCount = -1 as the
 * "unused slot" marker. Also creates the metadata converter and remembers
 * the main signal-driven thread used to notify stream-output completion.
 *
 * @param main_thread  thread that receives SIGNAL_MAIN_STREAM_OUTPUT_DONE
 */
RequestManager::RequestManager(SignalDrivenThread* main_thread):
    m_numOfEntries(0),
    m_entryInsertionIndex(-1),
    m_entryProcessingIndex(-1),
    m_entryFrameOutputIndex(-1),
    m_lastAeMode(0),
    m_lastAaMode(0),
    m_lastAwbMode(0),
    m_lastAeComp(0),
    m_frameIndex(-1)
{
    m_metadataConverter = new MetadataConverter;
    m_mainThread = main_thread;
    for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
        memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
        // frameCount == -1 marks the slot as unused (see FindEntryIndexByFrameCnt)
        entries[i].internal_shot.shot.ctl.request.frameCount = -1;
    }
    m_sensorPipelineSkipCnt = 0;
    return;
}
286
287RequestManager::~RequestManager()
288{
289    ALOGV("%s", __FUNCTION__);
290    if (m_metadataConverter != NULL) {
291        delete m_metadataConverter;
292        m_metadataConverter = NULL;
293    }
294
295    return;
296}
297
/*
 * Return the current number of occupied entries in the circular buffer.
 * NOTE(review): reads m_numOfEntries without taking m_requestMutex, unlike
 * IsRequestQueueFull() — verify callers tolerate a racy snapshot.
 */
int RequestManager::GetNumEntries()
{
    return m_numOfEntries;
}
302
/*
 * Store the default horizontal crop value; later applied to every shot's
 * scaler cropRegion[2] in MarkProcessingRequest().
 */
void RequestManager::SetDefaultParameters(int cropX)
{
    m_cropX = cropX;
}
307
308bool RequestManager::IsRequestQueueFull()
309{
310    Mutex::Autolock lock(m_requestMutex);
311    if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
312        return true;
313    else
314        return false;
315}
316
/*
 * Insert a new capture request into the circular buffer.
 *
 * Advances the insertion cursor, converts the framework metadata into the
 * internal camera2_shot_ext representation, and records how many output
 * streams the request targets (outputStreams[15] holds the stream count
 * by convention in this HAL).
 *
 * NOTE(review): if the target slot is not EMPTY the request is silently
 * dropped (verbose log only) and new_request is neither stored nor
 * returned to the framework — confirm the caller handles that case.
 *
 * @param new_request  framework-owned metadata; ownership appears to stay
 *                     with the framework until DeregisterRequest hands the
 *                     pointer back — TODO confirm
 */
void RequestManager::RegisterRequest(camera_metadata_t * new_request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);

    Mutex::Autolock lock(m_requestMutex);

    request_manager_entry * newEntry = NULL;
    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex,m_numOfEntries );


    newEntry = &(entries[newInsertionIndex]);

    if (newEntry->status!=EMPTY) {
        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
        return;
    }
    newEntry->status = REGISTERED;
    newEntry->original_request = new_request;
    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
    // outputStreams[15] carries the number of valid entries in outputStreams[]
    newEntry->output_stream_count = newEntry->internal_shot.shot.ctl.request.outputStreams[15];

    m_numOfEntries++;
    m_entryInsertionIndex = newInsertionIndex;


    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
}
347
/*
 * Retire the entry at the current frame-output index, handing the original
 * framework metadata back through *deregistered_request and resetting the
 * slot to the EMPTY/unused state (frameCount = -1 marker, counters cleared).
 *
 * Only entries in CAPTURED state may be deregistered; otherwise the call
 * is a logged no-op and *deregistered_request is left untouched.
 *
 * @param deregistered_request  out (may be NULL): receives the original
 *                              camera_metadata_t* registered for this slot
 */
void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    int frame_index;
    request_manager_entry * currentEntry;

    Mutex::Autolock lock(m_requestMutex);

    frame_index = GetFrameIndex();
    currentEntry =  &(entries[frame_index]);
    if (currentEntry->status != CAPTURED) {
        ALOGV("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__
        , m_entryProcessingIndex, m_entryFrameOutputIndex,(int)(currentEntry->status));
        return;
    }
    if (deregistered_request)  *deregistered_request = currentEntry->original_request;

    // reset the slot back to its "unused" state
    currentEntry->status = EMPTY;
    currentEntry->original_request = NULL;
    memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
    currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
    currentEntry->output_stream_count = 0;
    currentEntry->dynamic_meta_vaild = false;   // (field name typo is in the struct)
    m_numOfEntries--;
    ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);

    return;
}
377
/*
 * Build the output (result) metadata frame for the entry at the current
 * frame index.
 *
 * Allocates the result metadata in the fixed scratch buffer
 * m_tempFrameMetadataBuf (2000 bytes / 15 entries / 500 data bytes —
 * estimated capacities), injects the current AF state, then converts the
 * entry's internal shot into dynamic metadata.
 *
 * @param num_entries     out: metadata entry count of the prepared frame
 * @param frame_size      out: byte size of the prepared frame
 * @param prepared_frame  out: pointer to the scratch metadata (valid until
 *                        the next PrepareFrame call — it reuses the buffer)
 * @param afState         AF state value to publish in the result
 * @return true on success; false if the entry is not CAPTURED or the
 *         metadata conversion fails
 */
bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
                camera_metadata_t ** prepared_frame, int afState)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    Mutex::Autolock lock(m_requestMutex);
    status_t res = NO_ERROR;
    int tempFrameOutputIndex = GetFrameIndex();
    request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
    ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
        m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);

    if (currentEntry->status != CAPTURED) {
        ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));

        return false;
    }
    m_entryFrameOutputIndex = tempFrameOutputIndex;
    m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 15, 500); //estimated
    add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
    res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
                m_tempFrameMetadata);
    if (res!=NO_ERROR) {
        ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
        return false;
    }
    *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
    *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
    *prepared_frame = m_tempFrameMetadata;
    ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d)", m_entryFrameOutputIndex,
        currentEntry->internal_shot.shot.ctl.request.frameCount);
    // Dump();
    return true;
}
411
/*
 * Move the next REGISTERED entry into REQUESTED state and populate the
 * per-frame shot metadata that rides in plane 1 of the sensor buffer.
 *
 * Skips (returns -1) when the manager is empty, when the processing cursor
 * has caught up with insertion (request underrun), or when the next slot
 * is not in REGISTERED state.
 *
 * Side effects on the buffer's embedded camera2_shot_ext:
 *  - frameCount copied from the request, bypass flags (dis/dnr/fd) set,
 *  - request_scp/request_scc set from the request's output stream ids
 *    (0 = preview scaler, 1 = capture scaler, 2 = record path via scaler P),
 *  - AA mode forced to AUTO on the very first call only (static counter),
 *  - fixed defaults: full metadata/face-detect modes, 33 ms frame duration,
 *    auto exposure/sensitivity (0), crop width from m_cropX.
 *
 * @param buf     sensor buffer whose virt.extP[1] holds the shot metadata
 * @param afMode  out: AF mode requested by this entry
 * @return the new processing index, or -1 if nothing could be marked
 */
int RequestManager::MarkProcessingRequest(ExynosBuffer* buf, int *afMode)
{

    Mutex::Autolock lock(m_requestMutex);
    struct camera2_shot_ext * shot_ext;
    struct camera2_shot_ext * request_shot;
    int targetStreamIndex = 0;
    request_manager_entry * newEntry = NULL;
    static int count = 0;   // counts calls; only the first enables AA_CONTROL_AUTO

    if (m_numOfEntries == 0)  {
        ALOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
        return -1;
    }

    if ((m_entryProcessingIndex == m_entryInsertionIndex)
        && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
        ALOGD("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
         m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
        return -1;
    }

    int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
    ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);

    newEntry = &(entries[newProcessingIndex]);
    request_shot = &(newEntry->internal_shot);
    *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
    if (newEntry->status != REGISTERED) {
        ALOGD("DEBUG(%s)(%d): Circular buffer abnormal ", __FUNCTION__, newProcessingIndex);
        return -1;
    }

    newEntry->status = REQUESTED;

    // shot metadata lives in the second plane of the sensor buffer
    shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];

    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
    shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
    shot_ext->request_sensor = 1;
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    shot_ext->fd_bypass = 1;
    shot_ext->setfile = 0;

    // translate requested output stream ids into scaler port requests
    for (int i = 0; i < newEntry->output_stream_count; i++) {
        targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];

        if (targetStreamIndex==0) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
            shot_ext->request_scp = 1;
        }
        else if (targetStreamIndex == 1) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
            shot_ext->request_scc = 1;
        }
        else if (targetStreamIndex == 2) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (record)", __FUNCTION__, i);
            shot_ext->request_scp = 1;
            shot_ext->shot.ctl.request.outputStreams[2] = 1;  // flag the record path
        }
        else {
            ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
        }
    }

    // first frame ever: kick 3A into auto; afterwards leave control alone
    if (count == 0){
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
    } else
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;

    count++;
    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
    shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
    shot_ext->shot.magicNumber = 0x23456789;
    shot_ext->shot.ctl.sensor.exposureTime = 0;
    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;   // 33 ms (~30 fps)
    shot_ext->shot.ctl.sensor.sensitivity = 0;

    shot_ext->shot.ctl.scaler.cropRegion[0] = 0;
    shot_ext->shot.ctl.scaler.cropRegion[1] = 0;
    shot_ext->shot.ctl.scaler.cropRegion[2] = m_cropX;

    m_entryProcessingIndex = newProcessingIndex;
    return newProcessingIndex;
}
498
/*
 * Record that one output stream finished producing for frame frameCnt:
 * decrement the entry's outstanding output-stream count and run the
 * completion check (which signals the main thread).
 *
 * NOTE(review): only the count is decremented; stream_id is not matched
 * against the entry's stream list (see the TODO below).
 *
 * @param frameCnt   frame counter identifying the entry
 * @param stream_id  id of the stream that completed (currently unused
 *                   beyond logging)
 */
void RequestManager::NotifyStreamOutput(int frameCnt, int stream_id)
{
    int index;

    ALOGV("DEBUG(%s): frameCnt(%d), stream_id(%d)", __FUNCTION__, frameCnt, stream_id);

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }
    ALOGV("DEBUG(%s): frameCnt(%d), stream_id(%d) last cnt (%d)", __FUNCTION__, frameCnt, stream_id,  entries[index].output_stream_count);

    entries[index].output_stream_count--;  //TODO : match stream id also
    CheckCompleted(index);
    return;
}
516
/*
 * Publish entry "index" as the current frame and wake the main thread with
 * SIGNAL_MAIN_STREAM_OUTPUT_DONE.
 *
 * NOTE(review): despite the name, no completion condition is actually
 * checked here — output_stream_count is only logged, and the signal fires
 * unconditionally. Presumably the main thread re-validates; confirm before
 * relying on this as a "fully completed" notification.
 */
void RequestManager::CheckCompleted(int index)
{
    ALOGV("DEBUG(%s): reqIndex(%d) current Count(%d)", __FUNCTION__, index, entries[index].output_stream_count);
    SetFrameIndex(index);
    m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
    return;
}
524
/*
 * Set the index of the entry whose frame is next to be output.
 * Taken under m_requestMutex; see GetFrameIndex for the unlocked reader.
 */
void RequestManager::SetFrameIndex(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    m_frameIndex = index;
}
530
/*
 * Return the index of the entry whose frame is next to be output.
 * NOTE(review): reads m_frameIndex without m_requestMutex while
 * SetFrameIndex writes under it — a lock here would deadlock callers that
 * already hold the mutex (e.g. DeregisterRequest), so this is presumably a
 * deliberate racy read; confirm.
 */
int RequestManager::GetFrameIndex()
{
    return m_frameIndex;
}
535
/*
 * Merge the dynamic metadata (dm) reported back by the ISP into the
 * matching CAPTURED entry, identified by frameCount.
 *
 * The entry's sensor timestamp is preserved across the memcpy because it
 * was set earlier by RegisterTimestamp() and the incoming dm would
 * overwrite it. Finishes by running the completion check for the entry.
 *
 * @param shot_ext  shot metadata returned from the ISP
 */
void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
{
    int index;
    struct camera2_shot_ext * request_shot;
    nsecs_t timeStamp;
    int i;

    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);

    // find the CAPTURED entry with a matching frame counter
    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
            && (entries[i].status == CAPTURED))
            break;
    }

    if (i == NUM_MAX_REQUEST_MGR_ENTRY){
        ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        return;
    }

    request_manager_entry * newEntry = &(entries[i]);
    request_shot = &(newEntry->internal_shot);

    newEntry->dynamic_meta_vaild = true;
    // keep the timestamp registered from the sensor path; the ISP's dm copy
    // would clobber it otherwise
    timeStamp = request_shot->shot.dm.sensor.timeStamp;
    memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
    request_shot->shot.dm.sensor.timeStamp = timeStamp;
    CheckCompleted(i);
}
565
566void RequestManager::DumpInfoWithIndex(int index)
567{
568    struct camera2_shot_ext * currMetadata = &(entries[index].internal_shot);
569
570    ALOGV("####   frameCount(%d) exposureTime(%lld) ISO(%d)",
571        currMetadata->shot.ctl.request.frameCount,
572        currMetadata->shot.ctl.sensor.exposureTime,
573        currMetadata->shot.ctl.sensor.sensitivity);
574    if (currMetadata->shot.ctl.request.outputStreams[15] == 0)
575        ALOGV("####   No output stream selected");
576    else if (currMetadata->shot.ctl.request.outputStreams[15] == 1)
577        ALOGV("####   OutputStreamId : %d", currMetadata->shot.ctl.request.outputStreams[0]);
578    else if (currMetadata->shot.ctl.request.outputStreams[15] == 2)
579        ALOGV("####   OutputStreamId : %d, %d", currMetadata->shot.ctl.request.outputStreams[0],
580            currMetadata->shot.ctl.request.outputStreams[1]);
581    else
582        ALOGV("####   OutputStream num (%d) abnormal ", currMetadata->shot.ctl.request.outputStreams[15]);
583}
584
/*
 * Refresh the ISP shot metadata (shot_ext) from the registered request
 * that matches frameCnt, before the frame is handed to the ISP.
 *
 * Delta logic: 3A modes (aa.mode / aeMode / awbMode / aeExpCompensation)
 * are only written into the outgoing shot when they CHANGED since the last
 * applied value (m_lastAaMode etc.); an unchanged mode is sent as 0,
 * which presumably means "no change" to the firmware — TODO confirm.
 *
 * Also: re-derives request_scc/request_scp from the request's output
 * stream list (stream 2 = record path, which additionally pins the AE fps
 * range to 30/30), and optionally raises an AF trigger with a cleared
 * (full-frame) AF region.
 *
 * @param shot_ext  shot metadata buffer about to be queued to the ISP
 * @param frameCnt  frame counter selecting the source request entry
 * @param afTrigger true to set aa.afTrigger for this frame
 */
void    RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, bool afTrigger)
{
    int index, targetStreamIndex;
    struct camera2_shot_ext * request_shot;

    ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
    if (frameCnt < 0)
        return;

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * newEntry = &(entries[index]);
    request_shot = &(newEntry->internal_shot);
    // start from the request's full control block, then adjust below
    memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
    shot_ext->request_sensor = 1;
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    shot_ext->fd_bypass = 1;
    shot_ext->setfile = 0;

    shot_ext->request_scc = 0;
    shot_ext->request_scp = 0;

    shot_ext->shot.ctl.request.outputStreams[0] = 0;
    shot_ext->shot.ctl.request.outputStreams[1] = 0;
    shot_ext->shot.ctl.request.outputStreams[2] = 0;

    // send each 3A mode only when it differs from the last applied value
    if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
        shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
        m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
    }
    if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
        shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
        m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
    }
    if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
        m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
    }
    if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
        shot_ext->shot.ctl.aa.aeExpCompensation = 0;
    }
    else {
        shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
        m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
    }
    if (afTrigger) {
        ALOGE("### AF Trigger ");
        shot_ext->shot.ctl.aa.afTrigger = 1;
        // clear the AF region (all-zero = default/full region)
        shot_ext->shot.ctl.aa.afRegions[0] = 0;
        shot_ext->shot.ctl.aa.afRegions[1] = 0;
        shot_ext->shot.ctl.aa.afRegions[2] = 0;
        shot_ext->shot.ctl.aa.afRegions[3] = 0;
        shot_ext->shot.ctl.aa.afRegions[4] = 0;
    }
    else
        shot_ext->shot.ctl.aa.afTrigger = 0;
    // re-derive scaler port requests from the requested output streams
    for (int i = 0; i < newEntry->output_stream_count; i++) {
       targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];

        if (targetStreamIndex==0) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
            shot_ext->request_scp = 1;
        }
        else if (targetStreamIndex == 1) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
            shot_ext->request_scc = 1;
        }
        else if (targetStreamIndex == 2) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (record)", __FUNCTION__, i);
            shot_ext->request_scp = 1;
            shot_ext->shot.ctl.request.outputStreams[2] = 1;
            // recording path: lock the AE fps range to a steady 30 fps
            shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
            shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
        }
        else {
            ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
        }
    }
        ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
        (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
        (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
        (int)(shot_ext->shot.ctl.aa.afMode));
}
682
683int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
684{
685    for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
686        if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
687            return i;
688    }
689    return -1;
690}
691
692void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
693{
694    int index = FindEntryIndexByFrameCnt(frameCnt);
695    if (index == -1) {
696        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
697        return;
698    }
699
700    request_manager_entry * currentEntry = &(entries[index]);
701    currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
702    ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
703        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
704}
705
706uint64_t  RequestManager::GetTimestamp(int index)
707{
708    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
709        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
710        return 0;
711    }
712
713    request_manager_entry * currentEntry = &(entries[index]);
714    uint64_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
715    ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
716    return frameTime;
717}
718
719int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
720{
721    int i;
722
723    if (m_numOfEntries == 0) {
724        ALOGV("(%s): No Entry found", __FUNCTION__);
725        return -1;
726    }
727
728    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
729        if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
730            continue;
731
732        if (entries[i].status == REQUESTED) {
733            entries[i].status = CAPTURED;
734            return entries[i].internal_shot.shot.ctl.request.frameCount;
735        }
736
737    }
738
739    ALOGD("(%s): No Entry found", __FUNCTION__);
740
741    return -1;
742}
743
744void     RequestManager::SetInitialSkip(int count)
745{
746    ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
747    if (count > m_sensorPipelineSkipCnt)
748        m_sensorPipelineSkipCnt = count;
749}
750
751int     RequestManager::GetSkipCnt()
752{
753    ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
754    if (m_sensorPipelineSkipCnt == 0)
755        return m_sensorPipelineSkipCnt;
756    else
757        return --m_sensorPipelineSkipCnt;
758}
759
760void RequestManager::Dump(void)
761{
762    int i = 0;
763    request_manager_entry * currentEntry;
764    ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
765    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
766
767    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
768        currentEntry =  &(entries[i]);
769        ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%d outstream[1]-%d", i,
770        currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
771            currentEntry->output_stream_count,
772            currentEntry->internal_shot.shot.ctl.request.outputStreams[0],
773            currentEntry->internal_shot.shot.ctl.request.outputStreams[1]);
774    }
775}
776
777int     RequestManager::GetNextIndex(int index)
778{
779    index++;
780    if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
781        index = 0;
782
783    return index;
784}
785
/*
 * HAL device constructor.
 *
 * Initialization order (which matters here):
 *  1. load the gralloc HAL module once into the class-wide static handle,
 *  2. create the ION client used for all camera buffer allocations,
 *  3. create the Bayer buffer manager and main thread, initialize the ISP
 *     chain, then create (and start) the sensor/main threads,
 *  4. create the RequestManager bound to the main thread's signal loop,
 *  5. set up two hardware CSC (color-space conversion) contexts pinned to
 *     fixed GScaler nodes — one for still pictures, one for video,
 *  6. pre-fill the fixed EXIF attributes.
 *
 * Failures in gralloc/ION/CSC init are logged but not propagated —
 * presumably callers detect a broken device later; confirm.
 *
 * @param cameraId  camera index (front/back)
 * @param dev       the camera2_device_t this HAL instance backs
 * @param camera    static camera info/metadata provider
 */
ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera):
            m_requestQueueOps(NULL),
            m_frameQueueOps(NULL),
            m_callbackCookie(NULL),
            m_numOfRemainingReqInSvc(0),
            m_isRequestQueuePending(false),
            m_isRequestQueueNull(true),
            m_isSensorThreadOn(false),
            m_isSensorStarted(false),
            m_isIspStarted(false),
            m_ionCameraClient(0),
            m_initFlag1(false),
            m_initFlag2(false),
            m_scp_flushing(false),
            m_closing(false),
            m_recordingEnabled(false),
            m_needsRecordBufferInit(false),
            lastFrameCnt(-1),
            m_scp_closing(false),
            m_scp_closed(false),
            m_afState(HAL_AFSTATE_INACTIVE),
            m_afMode(NO_CHANGE),
            m_afMode2(NO_CHANGE),
            m_IsAfModeUpdateRequired(false),
            m_IsAfTriggerRequired(false),
            m_IsAfLockRequired(false),
            m_wideAspect(false),
            m_afTriggerId(0),
            m_halDevice(dev),
            m_need_streamoff(0),
            m_cameraId(cameraId)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    int ret = 0;

    m_exynosPictureCSC = NULL;
    m_exynosVideoCSC = NULL;

    // gralloc HAL is loaded once and shared via the static member
    if (!m_grallocHal) {
        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
        if (ret)
            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
    }

    m_camera2 = camera;
    m_ionCameraClient = createIonClient(m_ionCameraClient);
    if(m_ionCameraClient == 0)
        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);


    m_BayerManager = new BayerBufManager();
    m_mainThread    = new MainThread(this);
    InitializeISPChain();
    m_sensorThread  = new SensorThread(this);
    m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
    ALOGV("DEBUG(%s): created sensorthread ################", __FUNCTION__);

    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
    CSC_METHOD cscMethod = CSC_METHOD_HW;
    m_exynosPictureCSC = csc_init(cscMethod);
    if (m_exynosPictureCSC == NULL)
        ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
    csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);

    m_exynosVideoCSC = csc_init(cscMethod);
    if (m_exynosVideoCSC == NULL)
        ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
    csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);


    ALOGV("DEBUG(%s): END", __FUNCTION__);
    m_setExifFixedAttribute();
}
859
860ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
861{
862    ALOGV("%s: ENTER", __FUNCTION__);
863    this->release();
864    ALOGV("%s: EXIT", __FUNCTION__);
865}
866
867void ExynosCameraHWInterface2::release()
868{
869    int i, res;
870    ALOGD("%s: ENTER", __func__);
871    m_closing = true;
872
873    if (m_streamThreads[1] != NULL) {
874        m_streamThreads[1]->release();
875        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
876    }
877
878    if (m_streamThreads[0] != NULL) {
879        m_streamThreads[0]->release();
880        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
881    }
882
883    if (m_ispThread != NULL) {
884        m_ispThread->release();
885    }
886
887    if (m_sensorThread != NULL) {
888        m_sensorThread->release();
889    }
890
891    if (m_mainThread != NULL) {
892        m_mainThread->release();
893    }
894
895    if (m_exynosPictureCSC)
896        csc_deinit(m_exynosPictureCSC);
897    m_exynosPictureCSC = NULL;
898
899    if (m_exynosVideoCSC)
900        csc_deinit(m_exynosVideoCSC);
901    m_exynosVideoCSC = NULL;
902
903
904    if (m_streamThreads[1] != NULL) {
905        while (!m_streamThreads[1]->IsTerminated())
906        {
907            ALOGD("Waiting for ISP thread is tetminated");
908            usleep(100000);
909        }
910        m_streamThreads[1] = NULL;
911    }
912
913    if (m_streamThreads[0] != NULL) {
914        while (!m_streamThreads[0]->IsTerminated())
915        {
916            ALOGD("Waiting for sensor thread is tetminated");
917            usleep(100000);
918        }
919        m_streamThreads[0] = NULL;
920    }
921
922    if (m_ispThread != NULL) {
923        while (!m_ispThread->IsTerminated())
924        {
925            ALOGD("Waiting for isp thread is tetminated");
926            usleep(100000);
927        }
928        m_ispThread = NULL;
929    }
930
931    if (m_sensorThread != NULL) {
932        while (!m_sensorThread->IsTerminated())
933        {
934            ALOGD("Waiting for sensor thread is tetminated");
935            usleep(100000);
936        }
937        m_sensorThread = NULL;
938    }
939
940    if (m_mainThread != NULL) {
941        while (!m_mainThread->IsTerminated())
942        {
943            ALOGD("Waiting for main thread is tetminated");
944            usleep(100000);
945        }
946        m_mainThread = NULL;
947    }
948
949    if (m_requestManager != NULL) {
950        delete m_requestManager;
951        m_requestManager = NULL;
952    }
953
954    if (m_BayerManager != NULL) {
955        delete m_BayerManager;
956        m_BayerManager = NULL;
957    }
958//    for(i = 0; i < m_camera_info.sensor.buffers; i++)
959    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
960        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
961
962    for(i = 0; i < m_camera_info.capture.buffers; i++)
963        freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
964
965    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
966    res = exynos_v4l2_close(m_camera_info.sensor.fd);
967    if (res != NO_ERROR ) {
968        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
969    }
970
971    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
972    res = exynos_v4l2_close(m_camera_info.isp.fd);
973    if (res != NO_ERROR ) {
974        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
975    }
976
977    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
978    res = exynos_v4l2_close(m_camera_info.capture.fd);
979    if (res != NO_ERROR ) {
980        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
981    }
982
983    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
984    res = exynos_v4l2_close(m_fd_scp);
985    if (res != NO_ERROR ) {
986        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
987    }
988    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
989    deleteIonClient(m_ionCameraClient);
990
991    ALOGV("%s: EXIT", __func__);
992}
993
994void ExynosCameraHWInterface2::InitializeISPChain()
995{
996    char node_name[30];
997    int fd = 0;
998    int i;
999
1000    /* Open Sensor */
1001    memset(&node_name, 0x00, sizeof(char[30]));
1002    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
1003    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1004
1005    if (fd < 0) {
1006        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1007    }
1008    else {
1009        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1010    }
1011    m_camera_info.sensor.fd = fd;
1012
1013    /* Open ISP */
1014    memset(&node_name, 0x00, sizeof(char[30]));
1015    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
1016    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1017
1018    if (fd < 0) {
1019        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1020    }
1021    else {
1022        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1023    }
1024    m_camera_info.isp.fd = fd;
1025
1026    /* Open ScalerC */
1027    memset(&node_name, 0x00, sizeof(char[30]));
1028    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
1029    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1030
1031    if (fd < 0) {
1032        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1033    }
1034    else {
1035        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1036    }
1037    m_camera_info.capture.fd = fd;
1038
1039    /* Open ScalerP */
1040    memset(&node_name, 0x00, sizeof(char[30]));
1041    sprintf(node_name, "%s%d", NODE_PREFIX, 44);
1042    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1043    if (fd < 0) {
1044        ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1045    }
1046    else {
1047        ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1048    }
1049    m_fd_scp = fd;
1050
1051    if(m_cameraId == 0)
1052        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
1053    else
1054        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
1055
1056    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
1057    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
1058    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
1059
1060    m_camera_info.dummy_shot.dis_bypass = 1;
1061    m_camera_info.dummy_shot.dnr_bypass = 1;
1062    m_camera_info.dummy_shot.fd_bypass = 1;
1063
1064    /*sensor setting*/
1065    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
1066    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
1067    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
1068
1069    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
1070    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
1071
1072    /*request setting*/
1073    m_camera_info.dummy_shot.request_sensor = 1;
1074    m_camera_info.dummy_shot.request_scc = 0;
1075    m_camera_info.dummy_shot.request_scp = 0;
1076    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
1077    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[1] = 0;
1078    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[2] = 0;
1079
1080    m_camera_info.sensor.width = m_camera2->getSensorRawW();
1081    m_camera_info.sensor.height = m_camera2->getSensorRawH();
1082
1083    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
1084    m_camera_info.sensor.planes = 2;
1085    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1086    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1087    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
1088    m_camera_info.sensor.ionClient = m_ionCameraClient;
1089
1090    for(i = 0; i < m_camera_info.sensor.buffers; i++){
1091        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1092        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
1093        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
1094        allocCameraMemory(m_camera_info.sensor.ionClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1095    }
1096
1097    m_camera_info.isp.width = m_camera_info.sensor.width;
1098    m_camera_info.isp.height = m_camera_info.sensor.height;
1099    m_camera_info.isp.format = m_camera_info.sensor.format;
1100    m_camera_info.isp.planes = m_camera_info.sensor.planes;
1101    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1102    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1103    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
1104    m_camera_info.isp.ionClient = m_ionCameraClient;
1105
1106    for(i = 0; i < m_camera_info.isp.buffers; i++){
1107        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
1108        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
1109        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
1110        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
1111        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
1112        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
1113        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
1114    };
1115
1116    /* init ISP */
1117    cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
1118    cam_int_s_fmt(&(m_camera_info.isp));
1119    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
1120    cam_int_reqbufs(&(m_camera_info.isp));
1121    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
1122    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
1123
1124    /* init Sensor */
1125    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
1126    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
1127    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
1128        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
1129    }
1130    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
1131    cam_int_reqbufs(&(m_camera_info.sensor));
1132    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
1133    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1134        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1135        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1136                sizeof(struct camera2_shot_ext));
1137        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1138        m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1139        cam_int_qbuf(&(m_camera_info.sensor), i);
1140    }
1141    ALOGV("== stream_on :: .sensor");
1142    cam_int_streamon(&(m_camera_info.sensor));
1143
1144    /* init Capture */
1145    m_camera_info.capture.width = m_camera2->getSensorW();
1146    m_camera_info.capture.height = m_camera2->getSensorH();
1147    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
1148    m_camera_info.capture.planes = 1;
1149    m_camera_info.capture.buffers = 8;
1150    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1151    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
1152    m_camera_info.capture.ionClient = m_ionCameraClient;
1153
1154    for(i = 0; i < m_camera_info.capture.buffers; i++){
1155        initCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1156        m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
1157        allocCameraMemory(m_camera_info.capture.ionClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1158    }
1159
1160    cam_int_s_input(&(m_camera_info.capture), m_camera_info.sensor_id);
1161    cam_int_s_fmt(&(m_camera_info.capture));
1162    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1163    cam_int_reqbufs(&(m_camera_info.capture));
1164    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1165
1166    for (i = 0; i < m_camera_info.capture.buffers; i++) {
1167        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1168        cam_int_qbuf(&(m_camera_info.capture), i);
1169    }
1170
1171    ALOGV("== stream_on :: capture");
1172    if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1173        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1174    } else {
1175        m_camera_info.capture.status = true;
1176    }
1177}
1178
1179void ExynosCameraHWInterface2::StartISP()
1180{
1181    int i;
1182
1183    for (i = 0; i < m_camera_info.isp.buffers; i++) {
1184        ALOGV("DEBUG(%s): isp initial QBUF [%d]",  __FUNCTION__, i);
1185        cam_int_qbuf(&(m_camera_info.isp), i);
1186    }
1187
1188    ALOGV("== stream_on :: isp");
1189    cam_int_streamon(&(m_camera_info.isp));
1190
1191    for (i = 0; i < m_camera_info.isp.buffers; i++) {
1192        ALOGV("DEBUG(%s): isp initial DQBUF [%d]",  __FUNCTION__, i);
1193        cam_int_dqbuf(&(m_camera_info.isp));
1194    }
1195    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
1196}
1197
1198int ExynosCameraHWInterface2::getCameraId() const
1199{
1200    return m_cameraId;
1201}
1202
1203int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
1204{
1205    ALOGV("DEBUG(%s):", __FUNCTION__);
1206    if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
1207            && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
1208        m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
1209        return 0;
1210    }
1211    else {
1212        ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
1213        return 1;
1214    }
1215}
1216
1217int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
1218{
1219    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
1220    if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
1221        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
1222        return 0;
1223    }
1224    m_isRequestQueueNull = false;
1225    if (m_requestManager->GetNumEntries() == 0)
1226        m_requestManager->SetInitialSkip(5);
1227    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1228    return 0;
1229}
1230
1231int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
1232{
1233    ALOGV("DEBUG(%s):", __FUNCTION__);
1234    if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
1235            && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
1236        m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
1237        return 0;
1238    }
1239    else {
1240        ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
1241        return 1;
1242    }
1243}
1244
1245int ExynosCameraHWInterface2::getInProgressCount()
1246{
1247    int inProgressCount = m_requestManager->GetNumEntries();
1248    ALOGV("DEBUG(%s): # of dequeued req (%d)", __FUNCTION__, inProgressCount);
1249    return inProgressCount;
1250}
1251
// Stub: flushing in-progress captures is not implemented; always reports
// success (0) to the framework.
int ExynosCameraHWInterface2::flushCapturesInProgress()
{
    return 0;
}
1256
1257int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
1258{
1259    ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
1260
1261    if (request == NULL) return BAD_VALUE;
1262    if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
1263        return BAD_VALUE;
1264    }
1265    status_t res;
1266    // Pass 1, calculate size and allocate
1267    res = m_camera2->constructDefaultRequest(request_template,
1268            request,
1269            true);
1270    if (res != OK) {
1271        return res;
1272    }
1273    // Pass 2, build request
1274    res = m_camera2->constructDefaultRequest(request_template,
1275            request,
1276            false);
1277    if (res != OK) {
1278        ALOGE("Unable to populate new request for template %d",
1279                request_template);
1280    }
1281
1282    return res;
1283}
1284
1285int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
1286                                    uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
1287{
1288    ALOGV("DEBUG(%s): allocate stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
1289    char node_name[30];
1290    int fd = 0, allocCase = 0;
1291    StreamThread *AllocatedStream;
1292    stream_parameters_t newParameters;
1293
1294    if (format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE &&
1295        m_camera2->isSupportedResolution(width, height)) {
1296        if (!(m_streamThreads[0].get())) {
1297            ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
1298            allocCase = 0;
1299        }
1300        else {
1301            if ((m_streamThreads[0].get())->m_activated == true) {
1302                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1303                allocCase = 1;
1304            }
1305            else {
1306                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1307                allocCase = 2;
1308            }
1309        }
1310        if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)) {
1311            m_wideAspect = true;
1312        }
1313        else {
1314            m_wideAspect = false;
1315        }
1316        ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
1317
1318        if (allocCase == 0 || allocCase == 2) {
1319            *stream_id = 0;
1320
1321            if (allocCase == 0) {
1322                m_streamThreads[0]  = new StreamThread(this, *stream_id);
1323             }
1324            AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
1325            m_scp_flushing = false;
1326            m_scp_closing = false;
1327            m_scp_closed = false;
1328            usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()
1329
1330            *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1331            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1332            *max_buffers = 8;
1333
1334            newParameters.streamType    = STREAM_TYPE_DIRECT;
1335            newParameters.outputWidth   = width;
1336            newParameters.outputHeight  = height;
1337            newParameters.nodeWidth     = width;
1338            newParameters.nodeHeight    = height;
1339            newParameters.outputFormat  = *format_actual;
1340            newParameters.nodeFormat    = HAL_PIXEL_FORMAT_2_V4L2_PIX(*format_actual);
1341            newParameters.streamOps     = stream_ops;
1342            newParameters.usage         = *usage;
1343            newParameters.numHwBuffers  = 8;
1344            newParameters.numOwnSvcBuffers = *max_buffers;
1345            newParameters.fd            = m_fd_scp;
1346            newParameters.nodePlanes    = 3;
1347            newParameters.svcPlanes     = 3;
1348            newParameters.halBuftype    = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1349            newParameters.memory        = V4L2_MEMORY_DMABUF;
1350            newParameters.ionClient     = m_ionCameraClient;
1351            newParameters.numSvcBufsInHal  = 0;
1352            AllocatedStream->m_index = *stream_id;
1353            AllocatedStream->setParameter(&newParameters);
1354            AllocatedStream->m_activated = true;
1355
1356            m_scp_flushing = false;
1357            m_scp_closing = false;
1358            m_scp_closed = false;
1359            m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
1360            m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
1361            return 0;
1362        }
1363        else if (allocCase == 1) {
1364            record_parameters_t recordParameters;
1365            StreamThread *parentStream;
1366            parentStream = (StreamThread*)(m_streamThreads[0].get());
1367            if (!parentStream) {
1368                return 1;
1369                // TODO
1370            }
1371            *stream_id = 2;
1372            usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()
1373
1374            *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
1375            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1376            *max_buffers = 6;
1377
1378            recordParameters.outputWidth   = width;
1379            recordParameters.outputHeight  = height;
1380            recordParameters.outputFormat     = *format_actual;
1381            recordParameters.svcPlanes        = NUM_PLANES(*format_actual);
1382            recordParameters.streamOps     = stream_ops;
1383            recordParameters.usage         = *usage;
1384            recordParameters.numOwnSvcBuffers = *max_buffers;
1385            recordParameters.numSvcBufsInHal  = 0;
1386
1387            parentStream->setRecordingParameter(&recordParameters);
1388            m_scp_flushing = false;
1389            m_scp_closing = false;
1390            m_scp_closed = false;
1391            m_recordingEnabled = true;
1392            return 0;
1393        }
1394    }
1395    else if (format == HAL_PIXEL_FORMAT_BLOB
1396            && m_camera2->isSupportedJpegResolution(width, height)) {
1397
1398        *stream_id = 1;
1399
1400        if (!(m_streamThreads[*stream_id].get())) {
1401            ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
1402            m_streamThreads[1]  = new StreamThread(this, *stream_id);
1403            allocCase = 0;
1404        }
1405        else {
1406            if ((m_streamThreads[*stream_id].get())->m_activated == true) {
1407                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1408                allocCase = 1;
1409            }
1410            else {
1411                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1412                allocCase = 2;
1413            }
1414        }
1415
1416        AllocatedStream = (StreamThread*)(m_streamThreads[*stream_id].get());
1417
1418        fd = m_camera_info.capture.fd;
1419        usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()
1420
1421        *format_actual = HAL_PIXEL_FORMAT_BLOB;
1422
1423        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1424        *max_buffers = 4;
1425
1426        newParameters.streamType    = STREAM_TYPE_INDIRECT;
1427        newParameters.outputWidth   = width;
1428        newParameters.outputHeight  = height;
1429
1430        newParameters.nodeWidth     = m_camera2->getSensorW();
1431        newParameters.nodeHeight    = m_camera2->getSensorH();
1432
1433        newParameters.outputFormat  = *format_actual;
1434        newParameters.nodeFormat    = V4L2_PIX_FMT_YUYV;
1435        newParameters.streamOps     = stream_ops;
1436        newParameters.usage         = *usage;
1437        newParameters.numHwBuffers  = 8;
1438        newParameters.numOwnSvcBuffers = *max_buffers;
1439        newParameters.fd            = fd;
1440        newParameters.nodePlanes    = 1;
1441        newParameters.svcPlanes     = 1;
1442        newParameters.halBuftype    = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1443        newParameters.memory        = V4L2_MEMORY_DMABUF;
1444        newParameters.ionClient     = m_ionCameraClient;
1445        newParameters.numSvcBufsInHal  = 0;
1446        AllocatedStream->m_index = *stream_id;
1447        AllocatedStream->setParameter(&newParameters);
1448        return 0;
1449    }
1450    ALOGE("DEBUG(%s): Unsupported Pixel Format", __FUNCTION__);
1451    return 1; // TODO : check proper error code
1452}
1453
1454int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
1455        int num_buffers, buffer_handle_t *registeringBuffers)
1456{
1457    int                     i,j;
1458    void                    *virtAddr[3];
1459    uint32_t                plane_index = 0;
1460    stream_parameters_t     *targetStreamParms;
1461    record_parameters_t     *targetRecordParms;
1462    node_info_t             *currentNode;
1463
1464    struct v4l2_buffer v4l2_buf;
1465    struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1466
1467    ALOGV("DEBUG(%s): streamID (%d), num_buff(%d), handle(%x) ", __FUNCTION__,
1468        stream_id, num_buffers, (uint32_t)registeringBuffers);
1469
1470    if (stream_id == 0) {
1471        targetStreamParms = &(m_streamThreads[0]->m_parameters);
1472    }
1473    else if (stream_id == 1) {
1474        targetStreamParms = &(m_streamThreads[1]->m_parameters);
1475        // TODO : make clear stream off case
1476        m_need_streamoff = 0;
1477
1478        if (m_camera_info.capture.status == false) {
1479            /* capture */
1480            m_camera_info.capture.buffers = 8;
1481            cam_int_s_fmt(&(m_camera_info.capture));
1482            cam_int_reqbufs(&(m_camera_info.capture));
1483            for (i = 0; i < m_camera_info.capture.buffers; i++) {
1484                ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1485                cam_int_qbuf(&(m_camera_info.capture), i);
1486            }
1487
1488            if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1489                ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1490            } else {
1491                m_camera_info.capture.status = true;
1492            }
1493        }
1494    }
1495    else if (stream_id == 2) {
1496        m_need_streamoff = 0;
1497        targetRecordParms = &(m_streamThreads[0]->m_recordParameters);
1498
1499        targetRecordParms->numSvcBuffers = num_buffers;
1500
1501        for (i = 0 ; i<targetRecordParms->numSvcBuffers ; i++) {
1502            ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
1503                i, (uint32_t)(registeringBuffers[i]));
1504            if (m_grallocHal) {
1505                if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
1506                       targetRecordParms->usage, 0, 0,
1507                       targetRecordParms->outputWidth, targetRecordParms->outputHeight, virtAddr) != 0) {
1508                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
1509                }
1510                else {
1511                    ExynosBuffer currentBuf;
1512                    const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1513                    currentBuf.fd.extFd[0] = priv_handle->fd;
1514                    currentBuf.fd.extFd[1] = priv_handle->fd1;
1515                    currentBuf.fd.extFd[2] = priv_handle->fd2;
1516                    for (plane_index=0 ; plane_index < targetRecordParms->svcPlanes ; plane_index++) {
1517                        currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
1518                        ALOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x)",
1519                             __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
1520                             (unsigned int)currentBuf.virt.extP[plane_index]);
1521                    }
1522                    targetRecordParms->svcBufStatus[i]  = ON_SERVICE;
1523                    targetRecordParms->svcBuffers[i]    = currentBuf;
1524                    targetRecordParms->svcBufHandle[i]  = registeringBuffers[i];
1525                }
1526            }
1527        }
1528        m_needsRecordBufferInit = true;
1529        return 0;
1530    }
1531    else {
1532        ALOGE("ERR(%s) unregisterd stream id (%d)", __FUNCTION__, stream_id);
1533        return 1;
1534    }
1535
1536    if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
1537        if (num_buffers < targetStreamParms->numHwBuffers) {
1538            ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
1539                __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
1540            return 1;
1541        }
1542    }
1543    ALOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)",
1544            __FUNCTION__, targetStreamParms->outputFormat, targetStreamParms->outputWidth,
1545            targetStreamParms->outputHeight, targetStreamParms->svcPlanes);
1546
1547    targetStreamParms->numSvcBuffers = num_buffers;
1548    currentNode = &(targetStreamParms->node); // TO Remove
1549
1550    currentNode->fd         = targetStreamParms->fd;
1551    currentNode->width      = targetStreamParms->nodeWidth;
1552    currentNode->height     = targetStreamParms->nodeHeight;
1553    currentNode->format     = targetStreamParms->nodeFormat;
1554    currentNode->planes     = targetStreamParms->nodePlanes;
1555    currentNode->buffers    = targetStreamParms->numHwBuffers;
1556    currentNode->type       = targetStreamParms->halBuftype;
1557    currentNode->memory     = targetStreamParms->memory;
1558    currentNode->ionClient  = targetStreamParms->ionClient;
1559
1560    if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
1561        if(m_need_streamoff == 1) {
1562            if (m_sensorThread != NULL) {
1563                m_sensorThread->release();
1564                /* TODO */
1565                usleep(500000);
1566            } else {
1567                ALOGE("+++++++ sensor thread is NULL %d", __LINE__);
1568            }
1569
1570            ALOGV("(%s): calling capture streamoff", __FUNCTION__);
1571            if (cam_int_streamoff(&(m_camera_info.capture)) < 0) {
1572                ALOGE("ERR(%s): capture stream off fail", __FUNCTION__);
1573            } else {
1574                m_camera_info.capture.status = false;
1575            }
1576
1577            ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
1578
1579            m_camera_info.capture.buffers = 0;
1580            ALOGV("DEBUG(%s): capture calling reqbuf 0 ", __FUNCTION__);
1581            cam_int_reqbufs(&(m_camera_info.capture));
1582            ALOGV("DEBUG(%s): capture calling reqbuf 0 done", __FUNCTION__);
1583
1584            m_isIspStarted = false;
1585        }
1586
1587        if (m_need_streamoff == 1) {
1588            m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1589            m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1590            m_camera_info.capture.buffers = 8;
1591            /* isp */
1592            cam_int_s_fmt(&(m_camera_info.isp));
1593            cam_int_reqbufs(&(m_camera_info.isp));
1594            /* sensor */
1595            cam_int_s_fmt(&(m_camera_info.sensor));
1596            cam_int_reqbufs(&(m_camera_info.sensor));
1597
1598            for (i = 0; i < 8; i++) {
1599                ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1600                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1601                        sizeof(struct camera2_shot_ext));
1602                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1603                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1604                cam_int_qbuf(&(m_camera_info.sensor), i);
1605            }
1606
1607            /* capture */
1608            cam_int_s_fmt(&(m_camera_info.capture));
1609            cam_int_reqbufs(&(m_camera_info.capture));
1610            for (i = 0; i < m_camera_info.capture.buffers; i++) {
1611                ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1612                cam_int_qbuf(&(m_camera_info.capture), i);
1613            }
1614
1615       }
1616
1617        cam_int_s_input(currentNode, m_camera_info.sensor_id);
1618        cam_int_s_fmt(currentNode);
1619        cam_int_reqbufs(currentNode);
1620
1621    }
1622    else if (targetStreamParms->streamType == STREAM_TYPE_INDIRECT) {
1623        for(i = 0; i < currentNode->buffers; i++){
1624            memcpy(&(currentNode->buffer[i]), &(m_camera_info.capture.buffer[i]), sizeof(ExynosBuffer));
1625        }
1626    }
1627
1628    for (i = 0 ; i<targetStreamParms->numSvcBuffers ; i++) {
1629        ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
1630            i, (uint32_t)(registeringBuffers[i]));
1631        if (m_grallocHal) {
1632            if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
1633                   targetStreamParms->usage, 0, 0,
1634                   currentNode->width, currentNode->height, virtAddr) != 0) {
1635                ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
1636            }
1637            else {
1638                v4l2_buf.m.planes   = planes;
1639                v4l2_buf.type       = currentNode->type;
1640                v4l2_buf.memory     = currentNode->memory;
1641                v4l2_buf.index      = i;
1642                v4l2_buf.length     = currentNode->planes;
1643
1644                ExynosBuffer currentBuf;
1645                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1646
1647                m_getAlignedYUVSize(currentNode->format,
1648                    currentNode->width, currentNode->height, &currentBuf);
1649
1650                v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
1651                v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
1652                v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
1653                currentBuf.fd.extFd[0] = priv_handle->fd;
1654                currentBuf.fd.extFd[2] = priv_handle->fd1;
1655                currentBuf.fd.extFd[1] = priv_handle->fd2;
1656                ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
1657                if (currentNode->planes == 1) {
1658                    currentBuf.size.extS[0] = priv_handle->size;
1659                    currentBuf.size.extS[1] = 0;
1660                    currentBuf.size.extS[2] = 0;
1661                }
1662                for (plane_index = 0 ; plane_index < v4l2_buf.length ; plane_index++) {
1663                    currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
1664                    v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
1665                    ALOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x), length(%d)",
1666                         __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
1667                         (unsigned int)currentBuf.virt.extP[plane_index],
1668                         v4l2_buf.m.planes[plane_index].length);
1669                }
1670
1671                if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
1672                    if (i < currentNode->buffers) {
1673                        if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
1674                            ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
1675                                __FUNCTION__, stream_id, currentNode->fd);
1676                            //return false;
1677                        }
1678                        ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
1679                                __FUNCTION__, stream_id, currentNode->fd);
1680                        targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
1681                    }
1682                    else {
1683                        targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1684                    }
1685                }
1686                else if (targetStreamParms->streamType == STREAM_TYPE_INDIRECT) {
1687                    targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1688                }
1689                targetStreamParms->svcBuffers[i]       = currentBuf;
1690                targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
1691            }
1692        }
1693    }
1694
1695    ALOGV("DEBUG(%s): calling  streamon", __FUNCTION__);
1696    if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
1697        ALOGD("%s(%d), stream id = %d", __FUNCTION__, __LINE__, stream_id);
1698        cam_int_streamon(&(targetStreamParms->node));
1699    }
1700
1701    if (m_need_streamoff == 1) {
1702        if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1703            ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1704        } else {
1705            m_camera_info.capture.status = true;
1706        }
1707
1708        cam_int_streamon(&(m_camera_info.sensor));
1709    }
1710
1711    ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
1712    ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
1713
1714    if(!m_isIspStarted) {
1715        m_isIspStarted = true;
1716        StartISP();
1717    }
1718
1719    if (m_need_streamoff == 1) {
1720        m_requestManager->SetInitialSkip(8);
1721        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1722        m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1723    }
1724    m_need_streamoff = 1;
1725
1726    return 0;
1727}
1728
1729int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
1730{
1731    StreamThread *targetStream;
1732    ALOGV("DEBUG(%s):", __FUNCTION__);
1733
1734    if (stream_id == 0) {
1735        targetStream = (StreamThread*)(m_streamThreads[0].get());
1736        m_scp_flushing = true;
1737    }
1738    else if (stream_id == 1) {
1739        targetStream = (StreamThread*)(m_streamThreads[1].get());
1740    }
1741    else if (stream_id == 2 && m_recordingEnabled) {
1742        m_recordingEnabled = false;
1743        m_needsRecordBufferInit = true;
1744        return 0;
1745    }
1746    else {
1747        ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
1748        return 1;
1749    }
1750
1751    targetStream->m_releasing = true;
1752    do {
1753        ALOGD("stream thread release %d", __LINE__);
1754        targetStream->release();
1755        usleep(33000);
1756    } while (targetStream->m_releasing);
1757    targetStream->m_activated = false;
1758    ALOGV("DEBUG(%s): DONE", __FUNCTION__);
1759    return 0;
1760}
1761
/*
 * Reprocess streams are not supported by this HAL revision; this is a
 * stub that reports success without allocating anything.  The output
 * parameters (stream_id, consumer_usage, max_buffers) are left untouched.
 */
int ExynosCameraHWInterface2::allocateReprocessStream(
    uint32_t width, uint32_t height, uint32_t format,
    const camera2_stream_in_ops_t *reprocess_stream_ops,
    uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
1770
/*
 * Counterpart of allocateReprocessStream(); also a no-op stub since
 * reprocess streams are never created.
 */
int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
1776
1777int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
1778{
1779    ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
1780
1781    switch (trigger_id) {
1782    case CAMERA2_TRIGGER_AUTOFOCUS:
1783        ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
1784        OnAfTrigger(ext1);
1785        break;
1786
1787    case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
1788        ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
1789        OnAfCancel(ext1);
1790        break;
1791    default:
1792        break;
1793    }
1794    return 0;
1795}
1796
1797int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
1798{
1799    ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
1800    m_notifyCb = notify_cb;
1801    m_callbackCookie = user;
1802    return 0;
1803}
1804
/*
 * No vendor metadata tags are defined by this HAL; stub that leaves
 * *ops untouched and reports success.
 */
int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
1810
/*
 * dumpsys hook; no state is currently written to fd.
 */
int ExynosCameraHWInterface2::dump(int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
1816
1817void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
1818{
1819    switch (colorFormat) {
1820    // 1p
1821    case V4L2_PIX_FMT_RGB565 :
1822    case V4L2_PIX_FMT_YUYV :
1823    case V4L2_PIX_FMT_UYVY :
1824    case V4L2_PIX_FMT_VYUY :
1825    case V4L2_PIX_FMT_YVYU :
1826        buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
1827        buf->size.extS[1] = 0;
1828        buf->size.extS[2] = 0;
1829        break;
1830    // 2p
1831    case V4L2_PIX_FMT_NV12 :
1832    case V4L2_PIX_FMT_NV12T :
1833    case V4L2_PIX_FMT_NV21 :
1834        buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
1835        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
1836        buf->size.extS[2] = 0;
1837        break;
1838    case V4L2_PIX_FMT_NV12M :
1839    case V4L2_PIX_FMT_NV12MT_16X16 :
1840    case V4L2_PIX_FMT_NV21M:
1841        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
1842        buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
1843        buf->size.extS[2] = 0;
1844        break;
1845    case V4L2_PIX_FMT_NV16 :
1846    case V4L2_PIX_FMT_NV61 :
1847        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
1848        buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
1849        buf->size.extS[2] = 0;
1850        break;
1851     // 3p
1852    case V4L2_PIX_FMT_YUV420 :
1853    case V4L2_PIX_FMT_YVU420 :
1854        buf->size.extS[0] = (w * h);
1855        buf->size.extS[1] = (w * h) >> 2;
1856        buf->size.extS[2] = (w * h) >> 2;
1857        break;
1858    case V4L2_PIX_FMT_YUV420M:
1859    case V4L2_PIX_FMT_YVU420M :
1860    case V4L2_PIX_FMT_YUV422P :
1861        buf->size.extS[0] = ALIGN(w,  32) * ALIGN(h,  16);
1862        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
1863        buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
1864        break;
1865    default:
1866        ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
1867        return;
1868        break;
1869    }
1870}
1871
1872bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
1873                                             int  dst_w,  int   dst_h,
1874                                             int *crop_x, int *crop_y,
1875                                             int *crop_w, int *crop_h,
1876                                             int zoom)
1877{
1878    *crop_w = src_w;
1879    *crop_h = src_h;
1880
1881    if (   src_w != dst_w
1882        || src_h != dst_h) {
1883        float src_ratio = 1.0f;
1884        float dst_ratio = 1.0f;
1885
1886        // ex : 1024 / 768
1887        src_ratio = (float)src_w / (float)src_h;
1888
1889        // ex : 352  / 288
1890        dst_ratio = (float)dst_w / (float)dst_h;
1891
1892        if (dst_w * dst_h < src_w * src_h) {
1893            if (dst_ratio <= src_ratio) {
1894                // shrink w
1895                *crop_w = src_h * dst_ratio;
1896                *crop_h = src_h;
1897            } else {
1898                // shrink h
1899                *crop_w = src_w;
1900                *crop_h = src_w / dst_ratio;
1901            }
1902        } else {
1903            if (dst_ratio <= src_ratio) {
1904                // shrink w
1905                *crop_w = src_h * dst_ratio;
1906                *crop_h = src_h;
1907            } else {
1908                // shrink h
1909                *crop_w = src_w;
1910                *crop_h = src_w / dst_ratio;
1911            }
1912        }
1913    }
1914
1915    if (zoom != 0) {
1916        float zoomLevel = ((float)zoom + 10.0) / 10.0;
1917        *crop_w = (int)((float)*crop_w / zoomLevel);
1918        *crop_h = (int)((float)*crop_h / zoomLevel);
1919    }
1920
1921    #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
1922    unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
1923    if (w_align != 0) {
1924        if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
1925            && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
1926            *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
1927        }
1928        else
1929            *crop_w -= w_align;
1930    }
1931
1932    #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
1933    unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
1934    if (h_align != 0) {
1935        if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
1936            && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
1937            *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
1938        }
1939        else
1940            *crop_h -= h_align;
1941    }
1942
1943    *crop_x = (src_w - *crop_w) >> 1;
1944    *crop_y = (src_h - *crop_h) >> 1;
1945
1946    if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
1947        *crop_x -= 1;
1948
1949    if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
1950        *crop_y -= 1;
1951
1952    return true;
1953}
1954
1955BayerBufManager::BayerBufManager()
1956{
1957    ALOGV("DEBUG(%s): ", __FUNCTION__);
1958    for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
1959        entries[i].status = BAYER_ON_HAL_EMPTY;
1960        entries[i].reqFrameCnt = 0;
1961    }
1962    sensorEnqueueHead = 0;
1963    sensorDequeueHead = 0;
1964    ispEnqueueHead = 0;
1965    ispDequeueHead = 0;
1966    numOnSensor = 0;
1967    numOnIsp = 0;
1968    numOnHalFilled = 0;
1969    numOnHalEmpty = NUM_BAYER_BUFFERS;
1970}
1971
/* Nothing to free; entries[] is an in-object array. */
BayerBufManager::~BayerBufManager()
{
    ALOGV("%s", __FUNCTION__);
}
1976
1977int     BayerBufManager::GetIndexForSensorEnqueue()
1978{
1979    int ret = 0;
1980    if (numOnHalEmpty == 0)
1981        ret = -1;
1982    else
1983        ret = sensorEnqueueHead;
1984    ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
1985    return ret;
1986}
1987
1988int    BayerBufManager::MarkSensorEnqueue(int index)
1989{
1990    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
1991
1992    // sanity check
1993    if (index != sensorEnqueueHead) {
1994        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
1995        return -1;
1996    }
1997    if (entries[index].status != BAYER_ON_HAL_EMPTY) {
1998        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
1999            index, entries[index].status, BAYER_ON_HAL_EMPTY);
2000        return -1;
2001    }
2002
2003    entries[index].status = BAYER_ON_SENSOR;
2004    entries[index].reqFrameCnt = 0;
2005    numOnHalEmpty--;
2006    numOnSensor++;
2007    sensorEnqueueHead = GetNextIndex(index);
2008    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2009        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2010    return 0;
2011}
2012
2013int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
2014{
2015    ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
2016
2017    if (entries[index].status != BAYER_ON_SENSOR) {
2018        ALOGE("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2019            index, entries[index].status, BAYER_ON_SENSOR);
2020        return -1;
2021    }
2022
2023    entries[index].status = BAYER_ON_HAL_FILLED;
2024    numOnHalFilled++;
2025    numOnSensor--;
2026
2027    return 0;
2028}
2029
2030int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
2031{
2032    int ret = 0;
2033    if (numOnHalFilled == 0)
2034        ret = -1;
2035    else {
2036        *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
2037        ret = ispEnqueueHead;
2038    }
2039    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2040    return ret;
2041}
2042
2043int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
2044{
2045    int ret = 0;
2046    if (numOnIsp == 0)
2047        ret = -1;
2048    else {
2049        *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
2050        ret = ispDequeueHead;
2051    }
2052    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2053    return ret;
2054}
2055
2056int    BayerBufManager::MarkIspEnqueue(int index)
2057{
2058    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2059
2060    // sanity check
2061    if (index != ispEnqueueHead) {
2062        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
2063        return -1;
2064    }
2065    if (entries[index].status != BAYER_ON_HAL_FILLED) {
2066        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2067            index, entries[index].status, BAYER_ON_HAL_FILLED);
2068        return -1;
2069    }
2070
2071    entries[index].status = BAYER_ON_ISP;
2072    numOnHalFilled--;
2073    numOnIsp++;
2074    ispEnqueueHead = GetNextIndex(index);
2075    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2076        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2077    return 0;
2078}
2079
2080int    BayerBufManager::MarkIspDequeue(int index)
2081{
2082    ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
2083
2084    // sanity check
2085    if (index != ispDequeueHead) {
2086        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
2087        return -1;
2088    }
2089    if (entries[index].status != BAYER_ON_ISP) {
2090        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2091            index, entries[index].status, BAYER_ON_ISP);
2092        return -1;
2093    }
2094
2095    entries[index].status = BAYER_ON_HAL_EMPTY;
2096    entries[index].reqFrameCnt = 0;
2097    numOnHalEmpty++;
2098    numOnIsp--;
2099    ispDequeueHead = GetNextIndex(index);
2100    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2101        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2102    return 0;
2103}
2104
/* Number of bayer buffers currently queued to the sensor. */
int BayerBufManager::GetNumOnSensor()
{
    return numOnSensor;
}
2109
/* Number of HAL-owned bayer buffers holding captured data. */
int BayerBufManager::GetNumOnHalFilled()
{
    return numOnHalFilled;
}
2114
/* Number of bayer buffers currently queued to the ISP. */
int BayerBufManager::GetNumOnIsp()
{
    return numOnIsp;
}
2119
2120int     BayerBufManager::GetNextIndex(int index)
2121{
2122    index++;
2123    if (index >= NUM_BAYER_BUFFERS)
2124        index = 0;
2125
2126    return index;
2127}
2128
/*
 * Main HAL worker: dispatches on the signal bits posted to this
 * SignalDrivenThread.  Handles thread shutdown, pulls new capture
 * requests from the service queue into the request manager, and on
 * stream completion assembles the output frame metadata and returns it
 * to the service.  Code intentionally left byte-identical; see
 * NOTE(review) comments for suspected issues.
 */
void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
{
    camera_metadata_t *currentRequest = NULL;
    camera_metadata_t *currentFrame = NULL;
    size_t numEntries = 0;
    size_t frameSize = 0;
    camera_metadata_t * preparedFrame = NULL;
    camera_metadata_t *deregisteredRequest = NULL;
    uint32_t currentSignal = self->GetProcessingSignal();
    MainThread *  selfThread      = ((MainThread*)self);
    int res = 0;

    int ret;

    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);

    // Shutdown request: acknowledge and ask the thread to terminate.
    if (currentSignal & SIGNAL_THREAD_RELEASE) {
        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);

        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
        return;
    }

    // New request(s) available in the service queue: drain into the
    // request manager until it is full, then kick the sensor thread.
    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
        if (m_requestManager->IsRequestQueueFull()==false) {
            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
            if (NULL == currentRequest) {
                ALOGE("DEBUG(%s)(0x%x): dequeue_request returned NULL ", __FUNCTION__, currentSignal);
                m_isRequestQueueNull = true;
            }
            else {
                m_requestManager->RegisterRequest(currentRequest);

                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
                // Re-post the same signal to keep draining the queue.
                if (m_requestManager->IsRequestQueueFull()==false)
                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly

                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
            }
        }
        else {
            // Request manager full: remember that the service queue has
            // pending entries so they can be drained later.
            m_isRequestQueuePending = true;
        }
    }

    // A stream finished output: build the result metadata frame and
    // hand it back to the service, then recycle the request slot.
    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
        /*while (1)*/ {
            // NOTE(review): ret is an int compared against `false` here;
            // PrepareFrame's failure convention should be confirmed.
            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
            if (ret == false)
                ALOGD("++++++ PrepareFrame ret = %d", ret);

            m_requestManager->DeregisterRequest(&deregisteredRequest);

            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
            if (ret < 0)
                ALOGD("++++++ free_request ret = %d", ret);

            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
            if (ret < 0)
                ALOGD("++++++ dequeue_frame ret = %d", ret);

            if (currentFrame==NULL) {
                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
            }
            else {
                // NOTE(review): %d used for size_t values -- should be %zu.
                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
            }
            // NOTE(review): append is attempted even when currentFrame is
            // NULL (only logged above) -- confirm append_camera_metadata
            // tolerates a NULL destination.
            res = append_camera_metadata(currentFrame, preparedFrame);
            if (res==0) {
                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
            }
            else {
                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
            }
        }
        // Keep pulling requests unless the service queue reported empty.
        if (!m_isRequestQueueNull) {
            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
        }

        // Still-in-flight requests need further sensor processing.
        if (getInProgressCount()>0) {
            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
        }
    }
    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
    return;
}
2221
/* Sensor thread one-time init hook; currently a placeholder. */
void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * self)
{
    ALOGV("DEBUG(%s): ", __FUNCTION__ );
    /* will add */
    return;
}
2228
2229
/*
 * Debug helper: dump the key control (ctl) and dynamic-metadata (dm)
 * fields of a camera2_shot_ext to logcat.  Logging only; does not
 * modify the shot.
 */
void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
{
    ALOGD("####  common Section");
    ALOGD("####                 magic(%x) ",
        shot_ext->shot.magicNumber);
    ALOGD("####  ctl Section");
    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
        shot_ext->shot.ctl.request.metadataMode,
        shot_ext->shot.ctl.lens.aperture,
        shot_ext->shot.ctl.sensor.exposureTime,
        shot_ext->shot.ctl.sensor.frameDuration,
        shot_ext->shot.ctl.sensor.sensitivity,
        shot_ext->shot.ctl.aa.awbMode);

    // Per-shot output routing flags plus two of the requested streams.
    ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) pv(%d) rec(%d)",
        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
        shot_ext->shot.ctl.request.outputStreams[0],
        shot_ext->shot.ctl.request.outputStreams[2]);

    ALOGD("####  DM Section");
    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
        shot_ext->shot.dm.request.metadataMode,
        shot_ext->shot.dm.lens.aperture,
        shot_ext->shot.dm.sensor.exposureTime,
        shot_ext->shot.dm.sensor.frameDuration,
        shot_ext->shot.dm.sensor.sensitivity,
        shot_ext->shot.dm.sensor.timeStamp,
        shot_ext->shot.dm.aa.awbMode,
        shot_ext->shot.dm.request.frameCount );
}
2260
2261void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
2262{
2263    uint32_t        currentSignal = self->GetProcessingSignal();
2264    SensorThread *  selfThread      = ((SensorThread*)self);
2265    int index;
2266    int index_isp;
2267    status_t res;
2268    nsecs_t frameTime;
2269    int bayersOnSensor = 0, bayersOnIsp = 0;
2270    int j = 0;
2271    bool isCapture = false;
2272    ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
2273
2274    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2275        ALOGV("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2276
2277        ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
2278        cam_int_streamoff(&(m_camera_info.sensor));
2279        ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
2280
2281        m_camera_info.sensor.buffers = 0;
2282        ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
2283        cam_int_reqbufs(&(m_camera_info.sensor));
2284        ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
2285
2286        ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
2287        isp_int_streamoff(&(m_camera_info.isp));
2288        ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
2289
2290        m_camera_info.isp.buffers = 0;
2291        ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
2292        cam_int_reqbufs(&(m_camera_info.isp));
2293        ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
2294
2295        exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
2296
2297        ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2298        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2299        return;
2300    }
2301
2302    if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
2303    {
2304        ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
2305        int targetStreamIndex = 0, i=0;
2306        int matchedFrameCnt = -1, processingReqIndex;
2307        struct camera2_shot_ext *shot_ext;
2308        struct camera2_shot_ext *shot_ext_capture;
2309        bool triggered = false;
2310        int afMode;
2311
2312        /* dqbuf from sensor */
2313        ALOGV("Sensor DQbuf start");
2314        index = cam_int_dqbuf(&(m_camera_info.sensor));
2315        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
2316
2317        m_recordOutput = shot_ext->shot.ctl.request.outputStreams[2];
2318
2319        matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
2320
2321        if (matchedFrameCnt != -1) {
2322                frameTime = systemTime();
2323        m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
2324            if (m_IsAfModeUpdateRequired) {
2325                ALOGE("### AF Mode change(Mode %d) ", m_afMode);
2326                shot_ext->shot.ctl.aa.afMode = m_afMode;
2327                if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
2328                    ALOGE("### With Automatic triger for continuous modes");
2329                    m_afState = HAL_AFSTATE_STARTED;
2330                    shot_ext->shot.ctl.aa.afTrigger = 1;
2331                    triggered = true;
2332                }
2333                m_IsAfModeUpdateRequired = false;
2334                if (m_afMode2 != NO_CHANGE) {
2335                    enum aa_afmode tempAfMode = m_afMode2;
2336                    m_afMode2 = NO_CHANGE;
2337                    SetAfMode(tempAfMode);
2338                }
2339            }
2340            else {
2341                shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
2342            }
2343            if (m_IsAfTriggerRequired) {
2344                ALOGE("### AF Triggering with mode (%d)", m_afMode);
2345                if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
2346                    ALOGE("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
2347                else
2348                    m_afState = HAL_AFSTATE_STARTED;
2349                m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, true);
2350                shot_ext->shot.ctl.aa.afMode = m_afMode;
2351                m_IsAfTriggerRequired = false;
2352            }
2353            else {
2354                m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, false);
2355            }
2356            if (m_wideAspect) {
2357//                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
2358                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
2359                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2360            }
2361            else {
2362//                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
2363            }
2364            if (triggered)
2365                shot_ext->shot.ctl.aa.afTrigger = 1;
2366
2367            // TODO : check collision with AFMode Update
2368            if (m_IsAfLockRequired) {
2369                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
2370                m_IsAfLockRequired = false;
2371            }
2372            ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
2373                index,
2374                shot_ext->shot.ctl.request.frameCount,
2375                shot_ext->request_scp,
2376                shot_ext->request_scc,
2377                shot_ext->dis_bypass, sizeof(camera2_shot));
2378
2379            if(shot_ext->request_scc == 1) {
2380                isCapture = true;
2381            }
2382
2383            if(isCapture)
2384            {
2385                for(j = 0; j < m_camera_info.isp.buffers; j++)
2386                {
2387                    shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
2388                    shot_ext_capture->request_scc = 1;
2389                }
2390            }
2391
2392            cam_int_qbuf(&(m_camera_info.isp), index);
2393            //m_ispThread->SetSignal(SIGNAL_ISP_START_BAYER_DEQUEUE);
2394
2395            usleep(10000);
2396            if(isCapture)
2397            {
2398                for(j = 0; j < m_camera_info.isp.buffers; j++)
2399                {
2400                    shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
2401                    ALOGD("shot_ext_capture[%d] scp = %d, scc = %d", j, shot_ext_capture->request_scp, shot_ext_capture->request_scc);
2402//                    DumpInfoWithShot(shot_ext_capture);
2403                }
2404            }
2405
2406
2407            ALOGV("### isp DQBUF start");
2408            index_isp = cam_int_dqbuf(&(m_camera_info.isp));
2409            //m_previewOutput = 0;
2410
2411            if(isCapture)
2412            {
2413                for(j = 0; j < m_camera_info.isp.buffers; j++)
2414                {
2415                    shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
2416                    ALOGD("shot_ext_capture[%d] scp = %d, scc = %d", j, shot_ext_capture->request_scp, shot_ext_capture->request_scc);
2417//                    DumpInfoWithShot(shot_ext_capture);
2418                }
2419            }
2420            shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
2421
2422            ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) shot_size(%d)",
2423                index,
2424                shot_ext->shot.ctl.request.frameCount,
2425                shot_ext->request_scp,
2426                shot_ext->request_scc,
2427                shot_ext->dis_bypass, sizeof(camera2_shot));
2428
2429            if(isCapture) {
2430                    ALOGD("======= request_scc is 1");
2431                    memcpy(&m_jpegMetadata, &shot_ext->shot, sizeof(struct camera2_shot));
2432                    ALOGV("### Saving informationfor jpeg");
2433                    m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
2434
2435                for(j = 0; j < m_camera_info.isp.buffers; j++)
2436                {
2437                    shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
2438                    shot_ext_capture->request_scc = 0;
2439                }
2440
2441                isCapture = false;
2442            }
2443
2444            if (shot_ext->request_scp) {
2445                m_previewOutput = 1;
2446                m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
2447            }
2448
2449            if (shot_ext->request_scc) {
2450                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
2451            }
2452
2453            ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
2454               shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
2455            if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
2456                ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
2457                m_scp_closed = true;
2458            }
2459            else
2460                m_scp_closed = false;
2461
2462            m_requestManager->ApplyDynamicMetadata(shot_ext);
2463            OnAfNotification(shot_ext->shot.dm.aa.afState);
2464        }
2465
2466        processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]), &afMode);
2467        if (processingReqIndex == -1)
2468        {
2469            ALOGE("DEBUG(%s) req underrun => inserting bubble to BayerIndex(%d)", __FUNCTION__, index);
2470        }
2471        else {
2472            SetAfMode((enum aa_afmode)afMode);
2473        }
2474
2475        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
2476        if (m_scp_closing || m_scp_closed) {
2477            ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
2478            shot_ext->request_scc = 0;
2479            shot_ext->request_scp = 0;
2480            shot_ext->request_sensor = 0;
2481        }
2482
2483//        ALOGD("### Sensor Qbuf start(%d) SCP(%d) SCC(%d) DIS(%d)", index, shot_ext->request_scp, shot_ext->request_scc, shot_ext->dis_bypass);
2484
2485        cam_int_qbuf(&(m_camera_info.sensor), index);
2486        ALOGV("### Sensor QBUF done");
2487
2488        if (!m_closing){
2489            selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2490        }
2491        return;
2492    }
2493    return;
2494}
2495
2496void ExynosCameraHWInterface2::m_ispThreadInitialize(SignalDrivenThread * self)
2497{
2498    ALOGV("DEBUG(%s): ", __FUNCTION__ );
2499    /* will add */
2500    return;
2501}
2502
2503
2504void ExynosCameraHWInterface2::m_ispThreadFunc(SignalDrivenThread * self)
2505{
2506     ALOGV("DEBUG(%s): ", __FUNCTION__ );
2507    /* will add */
2508    return;
2509}
2510
2511void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
2512{
2513    StreamThread *          selfThread      = ((StreamThread*)self);
2514    ALOGV("DEBUG(%s): ", __FUNCTION__ );
2515    memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
2516    selfThread->m_isBufferInit = false;
2517
2518    return;
2519}
2520
2521void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
2522{
2523    uint32_t                currentSignal   = self->GetProcessingSignal();
2524    StreamThread *          selfThread      = ((StreamThread*)self);
2525    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
2526    record_parameters_t     *selfRecordParms =  &(selfThread->m_recordParameters);
2527    node_info_t             *currentNode    = &(selfStreamParms->node);
2528
2529    ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
2530
2531    if (currentSignal & SIGNAL_STREAM_CHANGE_PARAMETER) {
2532        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER", __FUNCTION__);
2533        selfThread->applyChange();
2534        if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT) {
2535            m_resizeBuf.size.extS[0] = ALIGN(selfStreamParms->outputWidth, 16) * ALIGN(selfStreamParms->outputHeight, 16) * 2;
2536            m_resizeBuf.size.extS[1] = 0;
2537            m_resizeBuf.size.extS[2] = 0;
2538
2539            if (allocCameraMemory(selfStreamParms->ionClient, &m_resizeBuf, 1) == -1) {
2540                ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
2541            }
2542        }
2543        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER DONE", __FUNCTION__);
2544    }
2545
2546    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2547        int i, index = -1, cnt_to_dq = 0;
2548        status_t res;
2549        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2550        ALOGD("(%s):(%d) SIGNAL_THREAD_RELEASE", __FUNCTION__, selfStreamParms->streamType);
2551
2552        if (selfThread->m_isBufferInit) {
2553            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
2554                ALOGV("DEBUG(%s): checking buffer index[%d] - status(%d)",
2555                    __FUNCTION__, i, selfStreamParms->svcBufStatus[i]);
2556                if (selfStreamParms->svcBufStatus[i] ==ON_DRIVER) cnt_to_dq++;
2557            }
2558
2559            ALOGV("DEBUG(%s): calling stream(%d) streamoff (fd:%d)", __FUNCTION__,
2560            selfThread->m_index, selfStreamParms->fd);
2561            if (cam_int_streamoff(&(selfStreamParms->node)) < 0 ){
2562                ALOGE("ERR(%s): stream off fail", __FUNCTION__);
2563            } else {
2564                if (selfStreamParms->streamType == STREAM_TYPE_DIRECT) {
2565                    m_scp_closing = true;
2566                } else {
2567                    m_camera_info.capture.status = false;
2568                }
2569            }
2570            ALOGV("DEBUG(%s): calling stream(%d) streamoff done", __FUNCTION__, selfThread->m_index);
2571            ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 (fd:%d)", __FUNCTION__,
2572                    selfThread->m_index, selfStreamParms->fd);
2573            currentNode->buffers = 0;
2574            cam_int_reqbufs(currentNode);
2575            ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 DONE(fd:%d)", __FUNCTION__,
2576                    selfThread->m_index, selfStreamParms->fd);
2577        }
2578        if (selfThread->m_index == 1 && m_resizeBuf.size.s != 0) {
2579            freeCameraMemory(&m_resizeBuf, 1);
2580        }
2581        selfThread->m_isBufferInit = false;
2582        selfThread->m_index = 255;
2583
2584        selfThread->m_releasing = false;
2585
2586        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2587
2588        return;
2589    }
2590
2591    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
2592        buffer_handle_t * buf = NULL;
2593        status_t res;
2594        void *virtAddr[3];
2595        int i, j;
2596        int index;
2597        nsecs_t timestamp;
2598
2599        ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING",
2600            __FUNCTION__,selfThread->m_index);
2601
2602        if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT)
2603        {
2604            ALOGD("stream(%s) processing SIGNAL_STREAM_DATA_COMING",
2605                __FUNCTION__,selfThread->m_index);
2606        }
2607
2608        if (!(selfThread->m_isBufferInit)) {
2609            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
2610                res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
2611                if (res != NO_ERROR || buf == NULL) {
2612                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
2613                    return;
2614                }
2615                ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
2616                   ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2617
2618                if (m_grallocHal->lock(m_grallocHal, *buf,
2619                           selfStreamParms->usage,
2620                           0, 0, selfStreamParms->outputWidth, selfStreamParms->outputHeight, virtAddr) != 0) {
2621                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2622                    return;
2623                }
2624                ALOGV("DEBUG(%s): locked img buf plane0(%x) plane1(%x) plane2(%x)",
2625                __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
2626
2627                index = selfThread->findBufferIndex(virtAddr[0]);
2628                if (index == -1) {
2629                    ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
2630                }
2631                else {
2632                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
2633                        __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
2634                    if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
2635                        selfStreamParms->svcBufStatus[index] = ON_DRIVER;
2636                    else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
2637                        selfStreamParms->svcBufStatus[index] = ON_HAL;
2638                    else {
2639                        ALOGV("DBG(%s): buffer status abnormal (%d) "
2640                            , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
2641                    }
2642                    selfStreamParms->numSvcBufsInHal++;
2643                    if (*buf != selfStreamParms->svcBufHandle[index])
2644                        ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
2645                    else
2646                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
2647                }
2648                selfStreamParms->svcBufIndex = 0;
2649            }
2650            selfThread->m_isBufferInit = true;
2651        }
2652
2653        if (m_recordingEnabled && m_needsRecordBufferInit) {
2654            ALOGV("DEBUG(%s): Recording Buffer Initialization numsvcbuf(%d)",
2655                __FUNCTION__, selfRecordParms->numSvcBuffers);
2656            int checkingIndex = 0;
2657            bool found = false;
2658            for ( i=0 ; i < selfRecordParms->numSvcBuffers; i++) {
2659                res = selfRecordParms->streamOps->dequeue_buffer(selfRecordParms->streamOps, &buf);
2660                if (res != NO_ERROR || buf == NULL) {
2661                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
2662                    return;
2663                }
2664                selfRecordParms->numSvcBufsInHal++;
2665                ALOGV("DEBUG(%s): [record] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
2666                   selfRecordParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2667
2668                if (m_grallocHal->lock(m_grallocHal, *buf,
2669                       selfRecordParms->usage, 0, 0,
2670                       selfRecordParms->outputWidth, selfRecordParms->outputHeight, virtAddr) != 0) {
2671                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2672                }
2673                else {
2674                      ALOGV("DEBUG(%s): [record] locked img buf plane0(%x) plane1(%x) plane2(%x)",
2675                        __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
2676                }
2677                found = false;
2678                for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
2679                    if (selfRecordParms->svcBufHandle[checkingIndex] == *buf ) {
2680                        found = true;
2681                        break;
2682                    }
2683                }
2684                ALOGV("DEBUG(%s): [record] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
2685                if (!found) break;
2686
2687                index = checkingIndex;
2688
2689                if (index == -1) {
2690                    ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
2691                }
2692                else {
2693                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
2694                        __FUNCTION__, index, selfRecordParms->svcBufStatus[index]);
2695                    if (selfRecordParms->svcBufStatus[index]== ON_SERVICE)
2696                        selfRecordParms->svcBufStatus[index] = ON_HAL;
2697                    else {
2698                        ALOGV("DBG(%s): buffer status abnormal (%d) "
2699                            , __FUNCTION__, selfRecordParms->svcBufStatus[index]);
2700                    }
2701                    if (*buf != selfRecordParms->svcBufHandle[index])
2702                        ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
2703                    else
2704                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
2705                }
2706                selfRecordParms->svcBufIndex = 0;
2707            }
2708            m_needsRecordBufferInit = false;
2709        }
2710
2711        do {
2712            if (selfStreamParms->streamType == STREAM_TYPE_DIRECT) {
2713                ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
2714                    selfThread->m_index, selfStreamParms->streamType);
2715
2716                index = cam_int_dqbuf(&(selfStreamParms->node));
2717                ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
2718                    selfThread->m_index, selfStreamParms->streamType, index);
2719
2720
2721                if (selfStreamParms->svcBufStatus[index] !=  ON_DRIVER)
2722                    ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
2723                           __FUNCTION__, selfStreamParms->svcBufStatus[index]);
2724                selfStreamParms->svcBufStatus[index] = ON_HAL;
2725
2726                if (m_recordOutput && m_recordingEnabled) {
2727                    ALOGV("DEBUG(%s): Entering record frame creator, index(%d)",__FUNCTION__, selfRecordParms->svcBufIndex);
2728                    bool found = false;
2729                    for (int i = 0 ; selfRecordParms->numSvcBuffers ; i++) {
2730                        if (selfRecordParms->svcBufStatus[selfRecordParms->svcBufIndex] == ON_HAL) {
2731                            found = true;
2732                            break;
2733                        }
2734                        selfRecordParms->svcBufIndex++;
2735                        if (selfRecordParms->svcBufIndex >= selfRecordParms->numSvcBuffers)
2736                            selfRecordParms->svcBufIndex = 0;
2737                    }
2738                    if (!found) {
2739                        ALOGE("(%s): cannot find free recording buffer", __FUNCTION__);
2740                        selfRecordParms->svcBufIndex++;
2741                        break;
2742                    }
2743
2744                    if (m_exynosVideoCSC) {
2745                        int videoW = selfRecordParms->outputWidth, videoH = selfRecordParms->outputHeight;
2746                        int cropX, cropY, cropW, cropH = 0;
2747                        int previewW = selfStreamParms->outputWidth, previewH = selfStreamParms->outputHeight;
2748                        m_getRatioSize(previewW, previewH,
2749                                       videoW, videoH,
2750                                       &cropX, &cropY,
2751                                       &cropW, &cropH,
2752                                       0);
2753
2754                        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
2755                                 __FUNCTION__, cropX, cropY, cropW, cropH);
2756
2757                        csc_set_src_format(m_exynosVideoCSC,
2758                                           previewW, previewH,
2759                                           cropX, cropY, cropW, cropH,
2760                                           HAL_PIXEL_FORMAT_EXYNOS_YV12,
2761                                           0);
2762
2763                        csc_set_dst_format(m_exynosVideoCSC,
2764                                           videoW, videoH,
2765                                           0, 0, videoW, videoH,
2766                                           selfRecordParms->outputFormat,
2767                                           1);
2768
2769                        csc_set_src_buffer(m_exynosVideoCSC,
2770                                       (void **)(&(selfStreamParms->svcBuffers[index].fd.fd)));
2771
2772                        csc_set_dst_buffer(m_exynosVideoCSC,
2773                            (void **)(&(selfRecordParms->svcBuffers[selfRecordParms->svcBufIndex].fd.fd)));
2774
2775                        if (csc_convert(m_exynosVideoCSC) != 0) {
2776                            ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
2777                        }
2778                        else {
2779                            ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
2780                        }
2781                    }
2782                    else {
2783                        ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
2784                    }
2785
2786                    res = selfRecordParms->streamOps->enqueue_buffer(selfRecordParms->streamOps,
2787                            systemTime(),
2788                            &(selfRecordParms->svcBufHandle[selfRecordParms->svcBufIndex]));
2789                    ALOGV("DEBUG(%s): stream(%d) record enqueue_buffer to svc done res(%d)", __FUNCTION__,
2790                        selfThread->m_index, res);
2791                    if (res == 0) {
2792                        selfRecordParms->svcBufStatus[selfRecordParms->svcBufIndex] = ON_SERVICE;
2793                        selfRecordParms->numSvcBufsInHal--;
2794                    }
2795                }
2796                if (m_previewOutput && m_requestManager->GetSkipCnt() <= 0) {
2797
2798                    ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
2799                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
2800                            m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()),
2801                            &(selfStreamParms->svcBufHandle[index]));
2802
2803                    ALOGV("DEBUG(%s): stream(%d) enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
2804                }
2805                else {
2806                    res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
2807                            &(selfStreamParms->svcBufHandle[index]));
2808                    ALOGV("DEBUG(%s): stream(%d) cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
2809                }
2810                if (res == 0) {
2811                    selfStreamParms->svcBufStatus[index] = ON_SERVICE;
2812                    selfStreamParms->numSvcBufsInHal--;
2813                }
2814                else {
2815                    selfStreamParms->svcBufStatus[index] = ON_HAL;
2816                }
2817            }
2818            else if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT) {
2819                ExynosRect jpegRect;
2820                bool found = false;
2821                bool ret = false;
2822                int pictureW, pictureH, pictureFramesize = 0;
2823                int pictureFormat;
2824                int cropX, cropY, cropW, cropH = 0;
2825                ExynosBuffer resizeBufInfo;
2826                ExynosRect   m_orgPictureRect;
2827
2828                ALOGD("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
2829                    selfThread->m_index, selfStreamParms->streamType);
2830                index = cam_int_dqbuf(&(selfStreamParms->node));
2831                ALOGD("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
2832                    selfThread->m_index, selfStreamParms->streamType, index);
2833
2834
2835                for (int i = 0; i < selfStreamParms->numSvcBuffers ; i++) {
2836                    if (selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] == ON_HAL) {
2837                        found = true;
2838                        break;
2839                    }
2840                    selfStreamParms->svcBufIndex++;
2841                    if (selfStreamParms->svcBufIndex >= selfStreamParms->numSvcBuffers)
2842                        selfStreamParms->svcBufIndex = 0;
2843                }
2844                if (!found) {
2845                    ALOGE("ERR(%s): NO free SVC buffer for JPEG", __FUNCTION__);
2846                    break;
2847                }
2848
2849                m_orgPictureRect.w = selfStreamParms->outputWidth;
2850                m_orgPictureRect.h = selfStreamParms->outputHeight;
2851
2852                ExynosBuffer* m_pictureBuf = &(m_camera_info.capture.buffer[index]);
2853
2854                pictureW = selfStreamParms->nodeWidth;
2855                pictureH = selfStreamParms->nodeHeight;
2856                pictureFormat = V4L2_PIX_FMT_YUYV;
2857                pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
2858
2859                if (m_exynosPictureCSC) {
2860                    m_getRatioSize(pictureW, pictureH,
2861                                   m_orgPictureRect.w, m_orgPictureRect.h,
2862                                   &cropX, &cropY,
2863                                   &cropW, &cropH,
2864                                   0);
2865
2866                    ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
2867                          __FUNCTION__, cropX, cropY, cropW, cropH);
2868
2869                    csc_set_src_format(m_exynosPictureCSC,
2870                                       ALIGN(pictureW, 16), ALIGN(pictureH, 16),
2871                                       cropX, cropY, cropW, cropH,
2872                                       V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
2873                                       0);
2874
2875                    csc_set_dst_format(m_exynosPictureCSC,
2876                                       m_orgPictureRect.w, m_orgPictureRect.h,
2877                                       0, 0, m_orgPictureRect.w, m_orgPictureRect.h,
2878                                       V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
2879                                       0);
2880                    csc_set_src_buffer(m_exynosPictureCSC,
2881                                       (void **)&m_pictureBuf->fd.fd);
2882
2883                    csc_set_dst_buffer(m_exynosPictureCSC,
2884                                       (void **)&m_resizeBuf.fd.fd);
2885                    for (int i = 0 ; i < 3 ; i++)
2886                        ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d",
2887                            __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
2888
2889                    if (csc_convert(m_exynosPictureCSC) != 0)
2890                        ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
2891
2892
2893                }
2894                else {
2895                    ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
2896                }
2897
2898                resizeBufInfo = m_resizeBuf;
2899
2900                m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_orgPictureRect.w, m_orgPictureRect.h, &m_resizeBuf);
2901
2902                for (int i = 1; i < 3; i++) {
2903                    if (m_resizeBuf.size.extS[i] != 0)
2904                        m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
2905
2906                    ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
2907                }
2908
2909                jpegRect.w = m_orgPictureRect.w;
2910                jpegRect.h = m_orgPictureRect.h;
2911                jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
2912
2913                if (yuv2Jpeg(&m_resizeBuf, &selfStreamParms->svcBuffers[selfStreamParms->svcBufIndex], &jpegRect) == false)
2914                    ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
2915                cam_int_qbuf(&(selfStreamParms->node), index);
2916                ALOGV("DEBUG(%s): stream(%d) type(%d) QBUF DONE ",__FUNCTION__,
2917                    selfThread->m_index, selfStreamParms->streamType);
2918
2919                m_resizeBuf = resizeBufInfo;
2920
2921                res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, systemTime(), &(selfStreamParms->svcBufHandle[selfStreamParms->svcBufIndex]));
2922
2923                ALOGV("DEBUG(%s): stream(%d) enqueue_buffer index(%d) to svc done res(%d)",
2924                        __FUNCTION__, selfThread->m_index, selfStreamParms->svcBufIndex, res);
2925                if (res == 0) {
2926                    selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] = ON_SERVICE;
2927                    selfStreamParms->numSvcBufsInHal--;
2928                }
2929                else {
2930                    selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] = ON_HAL;
2931                }
2932            }
2933        }
2934        while (0);
2935
2936        if (selfStreamParms->streamType == STREAM_TYPE_DIRECT  && m_recordOutput && m_recordingEnabled) {
2937            do {
2938                ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , selfRecordParms->numSvcBufsInHal);
2939                if (selfRecordParms->numSvcBufsInHal >= 1)
2940                {
2941                    ALOGV("DEBUG(%s): breaking", __FUNCTION__);
2942                    break;
2943                }
2944                res = selfRecordParms->streamOps->dequeue_buffer(selfRecordParms->streamOps, &buf);
2945                if (res != NO_ERROR || buf == NULL) {
2946                    ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
2947                    break;
2948                }
2949                selfRecordParms->numSvcBufsInHal ++;
2950                ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
2951                   selfRecordParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2952
2953                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
2954                bool found = false;
2955                int checkingIndex = 0;
2956                for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
2957                    if (priv_handle->fd == selfRecordParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
2958                        found = true;
2959                        break;
2960                    }
2961                }
2962                ALOGV("DEBUG(%s): recording dequeueed_buffer found index(%d)", __FUNCTION__, found);
2963
2964                if (!found) {
2965                     break;
2966                }
2967
2968                index = checkingIndex;
2969                if (selfRecordParms->svcBufStatus[index] == ON_SERVICE) {
2970                    selfRecordParms->svcBufStatus[index] = ON_HAL;
2971                }
2972                else {
2973                    ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
2974                        index,  selfRecordParms->svcBufStatus[index]);
2975                }
2976            } while (0);
2977        }
2978        if (selfStreamParms->streamType == STREAM_TYPE_DIRECT) {
2979            while (selfStreamParms->numSvcBufsInHal < selfStreamParms->numOwnSvcBuffers) {
2980                res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
2981                if (res != NO_ERROR || buf == NULL) {
2982                    ALOGV("DEBUG(%s): stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
2983                    break;
2984                }
2985                selfStreamParms->numSvcBufsInHal++;
2986                ALOGV("DEBUG(%s): stream(%d) got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
2987                    selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
2988                   ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2989                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
2990
2991                bool found = false;
2992                int checkingIndex = 0;
2993                for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
2994                    if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
2995                        found = true;
2996                        break;
2997                    }
2998                }
2999                ALOGV("DEBUG(%s): post_dequeue_buffer found(%d)", __FUNCTION__, found);
3000                if (!found) break;
3001                ALOGV("DEBUG(%s): preparing to qbuf [%d]", __FUNCTION__, checkingIndex);
3002                index = checkingIndex;
3003                if (index < selfStreamParms->numHwBuffers) {
3004                    uint32_t    plane_index = 0;
3005                    ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[index]);
3006                    struct v4l2_buffer v4l2_buf;
3007                    struct v4l2_plane  planes[VIDEO_MAX_PLANES];
3008
3009                    v4l2_buf.m.planes   = planes;
3010                    v4l2_buf.type       = currentNode->type;
3011                    v4l2_buf.memory     = currentNode->memory;
3012                    v4l2_buf.index      = index;
3013                    v4l2_buf.length     = currentNode->planes;
3014
3015                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
3016                    v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
3017                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
3018                    for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
3019                        v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
3020                        ALOGV("DEBUG(%s): plane(%d): fd(%d)  length(%d)",
3021                             __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
3022                             v4l2_buf.m.planes[plane_index].length);
3023                    }
3024                    if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
3025                        ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail",
3026                            __FUNCTION__, selfThread->m_index);
3027                        return;
3028                    }
3029                    selfStreamParms->svcBufStatus[index] = ON_DRIVER;
3030                    ALOGV("DEBUG(%s): stream id(%d) type0 QBUF done index(%d)",
3031                        __FUNCTION__, selfThread->m_index, index);
3032                }
3033            }
3034        }
3035        else if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT) {
3036            while (selfStreamParms->numSvcBufsInHal < selfStreamParms->numOwnSvcBuffers) {
3037                res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3038                if (res != NO_ERROR || buf == NULL) {
3039                    ALOGV("DEBUG(%s): stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
3040                    break;
3041                }
3042
3043                ALOGV("DEBUG(%s): stream(%d) got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3044                    selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3045                   ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3046
3047                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3048
3049                bool found = false;
3050                int checkingIndex = 0;
3051                for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3052                    if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3053                        found = true;
3054                        break;
3055                    }
3056                }
3057                if (!found) break;
3058                selfStreamParms->svcBufStatus[checkingIndex] = ON_HAL;
3059                selfStreamParms->numSvcBufsInHal++;
3060            }
3061
3062        }
3063        ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING DONE",
3064            __FUNCTION__,selfThread->m_index);
3065    }
3066    return;
3067}
3068
3069bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
3070                            ExynosBuffer *jpegBuf,
3071                            ExynosRect *rect)
3072{
3073    unsigned char *addr;
3074
3075    int thumbW = 320;
3076    int thumbH = 240;
3077
3078    ExynosJpegEncoderForCamera jpegEnc;
3079    bool ret = false;
3080    int res = 0;
3081
3082    unsigned int *yuvSize = yuvBuf->size.extS;
3083
3084    if (jpegEnc.create()) {
3085        ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
3086        goto jpeg_encode_done;
3087    }
3088
3089    if (jpegEnc.setQuality(100)) {
3090        ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
3091        goto jpeg_encode_done;
3092    }
3093
3094    if (jpegEnc.setSize(rect->w, rect->h)) {
3095        ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
3096        goto jpeg_encode_done;
3097    }
3098    ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
3099
3100    if (jpegEnc.setColorFormat(rect->colorFormat)) {
3101        ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
3102        goto jpeg_encode_done;
3103    }
3104
3105    if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
3106        ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
3107        goto jpeg_encode_done;
3108    }
3109
3110    mExifInfo.enableThumb = true;
3111
3112    if (jpegEnc.setThumbnailSize(thumbW, thumbH)) {
3113        ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, thumbW, thumbH);
3114        goto jpeg_encode_done;
3115    }
3116
3117    ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, thumbW, thumbH);
3118    if (jpegEnc.setThumbnailQuality(50)) {
3119        ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
3120        goto jpeg_encode_done;
3121    }
3122
3123    m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
3124    ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
3125    if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
3126        ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
3127        goto jpeg_encode_done;
3128    }
3129    if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
3130        ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
3131        goto jpeg_encode_done;
3132    }
3133
3134    if (jpegEnc.updateConfig()) {
3135        ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
3136        goto jpeg_encode_done;
3137    }
3138
3139    if (res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) {
3140        ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
3141        goto jpeg_encode_done;
3142    }
3143
3144    ret = true;
3145
3146jpeg_encode_done:
3147
3148    if (jpegEnc.flagCreate() == true)
3149        jpegEnc.destroy();
3150
3151    return ret;
3152}
3153
3154
3155void ExynosCameraHWInterface2::OnAfTrigger(int id)
3156{
3157    switch (m_afMode) {
3158    case AA_AFMODE_AUTO:
3159    case AA_AFMODE_MACRO:
3160        OnAfTriggerAutoMacro(id);
3161        break;
3162    case AA_AFMODE_CONTINUOUS_VIDEO:
3163        OnAfTriggerCAFVideo(id);
3164        break;
3165    case AA_AFMODE_CONTINUOUS_PICTURE:
3166        OnAfTriggerCAFPicture(id);
3167        break;
3168    case AA_AFMODE_OFF:
3169    default:
3170        break;
3171    }
3172}
3173
3174void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id)
3175{
3176    int nextState = NO_TRANSITION;
3177    m_afTriggerId = id;
3178
3179    switch (m_afState) {
3180    case HAL_AFSTATE_INACTIVE:
3181        nextState = HAL_AFSTATE_NEEDS_COMMAND;
3182        m_IsAfTriggerRequired = true;
3183        break;
3184    case HAL_AFSTATE_NEEDS_COMMAND:
3185        nextState = NO_TRANSITION;
3186        break;
3187    case HAL_AFSTATE_STARTED:
3188        nextState = NO_TRANSITION;
3189        break;
3190    case HAL_AFSTATE_SCANNING:
3191        nextState = NO_TRANSITION;
3192        break;
3193    case HAL_AFSTATE_LOCKED:
3194        nextState = HAL_AFSTATE_NEEDS_COMMAND;
3195        m_IsAfTriggerRequired = true;
3196        break;
3197    case HAL_AFSTATE_FAILED:
3198        nextState = HAL_AFSTATE_NEEDS_COMMAND;
3199        m_IsAfTriggerRequired = true;
3200        break;
3201    default:
3202        break;
3203    }
3204    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
3205    if (nextState != NO_TRANSITION)
3206        m_afState = nextState;
3207}
3208
3209void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
3210{
3211    int nextState = NO_TRANSITION;
3212    m_afTriggerId = id;
3213
3214    switch (m_afState) {
3215    case HAL_AFSTATE_INACTIVE:
3216        nextState = HAL_AFSTATE_FAILED;
3217        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3218        break;
3219    case HAL_AFSTATE_NEEDS_COMMAND:
3220        // not used
3221        break;
3222    case HAL_AFSTATE_STARTED:
3223        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
3224        break;
3225    case HAL_AFSTATE_SCANNING:
3226        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
3227        break;
3228    case HAL_AFSTATE_NEEDS_DETERMINATION:
3229        nextState = NO_TRANSITION;
3230        break;
3231    case HAL_AFSTATE_PASSIVE_FOCUSED:
3232        m_IsAfLockRequired = true;
3233        SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
3234        nextState = HAL_AFSTATE_LOCKED;
3235        break;
3236    case HAL_AFSTATE_LOCKED:
3237        nextState = NO_TRANSITION;
3238        break;
3239    case HAL_AFSTATE_FAILED:
3240        nextState = NO_TRANSITION;
3241        break;
3242    default:
3243        break;
3244    }
3245    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
3246    if (nextState != NO_TRANSITION)
3247        m_afState = nextState;
3248}
3249
3250
3251void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id)
3252{
3253    int nextState = NO_TRANSITION;
3254    m_afTriggerId = id;
3255
3256    switch (m_afState) {
3257    case HAL_AFSTATE_INACTIVE:
3258        nextState = HAL_AFSTATE_FAILED;
3259        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3260        break;
3261    case HAL_AFSTATE_NEEDS_COMMAND:
3262        // not used
3263        break;
3264    case HAL_AFSTATE_STARTED:
3265        m_IsAfLockRequired = true;
3266        nextState = HAL_AFSTATE_FAILED;
3267        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3268        break;
3269    case HAL_AFSTATE_SCANNING:
3270        m_IsAfLockRequired = true;
3271        nextState = HAL_AFSTATE_FAILED;
3272        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3273        break;
3274    case HAL_AFSTATE_NEEDS_DETERMINATION:
3275        // not used
3276        break;
3277    case HAL_AFSTATE_PASSIVE_FOCUSED:
3278        m_IsAfLockRequired = true;
3279        SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
3280        nextState = HAL_AFSTATE_LOCKED;
3281        break;
3282    case HAL_AFSTATE_LOCKED:
3283        nextState = NO_TRANSITION;
3284        break;
3285    case HAL_AFSTATE_FAILED:
3286        nextState = NO_TRANSITION;
3287        break;
3288    default:
3289        break;
3290    }
3291    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
3292    if (nextState != NO_TRANSITION)
3293        m_afState = nextState;
3294}
3295
3296void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
3297{
3298    switch (m_afMode) {
3299    case AA_AFMODE_AUTO:
3300    case AA_AFMODE_MACRO:
3301        OnAfNotificationAutoMacro(noti);
3302        break;
3303    case AA_AFMODE_CONTINUOUS_VIDEO:
3304        OnAfNotificationCAFVideo(noti);
3305        break;
3306    case AA_AFMODE_CONTINUOUS_PICTURE:
3307        OnAfNotificationCAFPicture(noti);
3308        break;
3309    case AA_AFMODE_OFF:
3310    default:
3311        break;
3312    }
3313}
3314
3315void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
3316{
3317    int nextState = NO_TRANSITION;
3318    bool bWrongTransition = false;
3319
3320    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
3321        switch (noti) {
3322        case AA_AFSTATE_INACTIVE:
3323        case AA_AFSTATE_ACTIVE_SCAN:
3324        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3325        case AA_AFSTATE_AF_FAILED_FOCUS:
3326        default:
3327            nextState = NO_TRANSITION;
3328            break;
3329        }
3330    }
3331    else if (m_afState == HAL_AFSTATE_STARTED) {
3332        switch (noti) {
3333        case AA_AFSTATE_INACTIVE:
3334            nextState = NO_TRANSITION;
3335            break;
3336        case AA_AFSTATE_ACTIVE_SCAN:
3337            nextState = HAL_AFSTATE_SCANNING;
3338            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
3339            break;
3340        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3341            nextState = NO_TRANSITION;
3342            break;
3343        case AA_AFSTATE_AF_FAILED_FOCUS:
3344            nextState = NO_TRANSITION;
3345            break;
3346        default:
3347            bWrongTransition = true;
3348            break;
3349        }
3350    }
3351    else if (m_afState == HAL_AFSTATE_SCANNING) {
3352        switch (noti) {
3353        case AA_AFSTATE_INACTIVE:
3354            bWrongTransition = true;
3355            break;
3356        case AA_AFSTATE_ACTIVE_SCAN:
3357            nextState = NO_TRANSITION;
3358            break;
3359        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3360            nextState = HAL_AFSTATE_LOCKED;
3361            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
3362            break;
3363        case AA_AFSTATE_AF_FAILED_FOCUS:
3364            nextState = HAL_AFSTATE_FAILED;
3365            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3366            break;
3367        default:
3368            bWrongTransition = true;
3369            break;
3370        }
3371    }
3372    else if (m_afState == HAL_AFSTATE_LOCKED) {
3373        switch (noti) {
3374            case AA_AFSTATE_INACTIVE:
3375            case AA_AFSTATE_ACTIVE_SCAN:
3376                bWrongTransition = true;
3377                break;
3378            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3379                nextState = NO_TRANSITION;
3380                break;
3381            case AA_AFSTATE_AF_FAILED_FOCUS:
3382            default:
3383                bWrongTransition = true;
3384                break;
3385        }
3386    }
3387    else if (m_afState == HAL_AFSTATE_FAILED) {
3388        switch (noti) {
3389            case AA_AFSTATE_INACTIVE:
3390            case AA_AFSTATE_ACTIVE_SCAN:
3391            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3392                bWrongTransition = true;
3393                break;
3394            case AA_AFSTATE_AF_FAILED_FOCUS:
3395                nextState = NO_TRANSITION;
3396                break;
3397            default:
3398                bWrongTransition = true;
3399                break;
3400        }
3401    }
3402    if (bWrongTransition) {
3403        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
3404        return;
3405    }
3406    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
3407    if (nextState != NO_TRANSITION)
3408        m_afState = nextState;
3409}
3410
3411void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
3412{
3413    int nextState = NO_TRANSITION;
3414    bool bWrongTransition = false;
3415
3416    if (m_afState == HAL_AFSTATE_INACTIVE) {
3417        switch (noti) {
3418        case AA_AFSTATE_INACTIVE:
3419        case AA_AFSTATE_ACTIVE_SCAN:
3420        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3421        case AA_AFSTATE_AF_FAILED_FOCUS:
3422        default:
3423            nextState = NO_TRANSITION;
3424            break;
3425        }
3426    }
3427    else if (m_afState == HAL_AFSTATE_STARTED) {
3428        switch (noti) {
3429        case AA_AFSTATE_INACTIVE:
3430            nextState = NO_TRANSITION;
3431            break;
3432        case AA_AFSTATE_ACTIVE_SCAN:
3433            nextState = HAL_AFSTATE_SCANNING;
3434            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
3435            break;
3436        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3437            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
3438            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
3439            break;
3440        case AA_AFSTATE_AF_FAILED_FOCUS:
3441            nextState = HAL_AFSTATE_FAILED;
3442            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3443            break;
3444        default:
3445            bWrongTransition = true;
3446            break;
3447        }
3448    }
3449    else if (m_afState == HAL_AFSTATE_SCANNING) {
3450        switch (noti) {
3451        case AA_AFSTATE_INACTIVE:
3452            bWrongTransition = true;
3453            break;
3454        case AA_AFSTATE_ACTIVE_SCAN:
3455            nextState = NO_TRANSITION;
3456            break;
3457        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3458            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
3459            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
3460            break;
3461        case AA_AFSTATE_AF_FAILED_FOCUS:
3462            nextState = NO_TRANSITION;
3463            break;
3464        default:
3465            bWrongTransition = true;
3466            break;
3467        }
3468    }
3469    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
3470        switch (noti) {
3471        case AA_AFSTATE_INACTIVE:
3472            bWrongTransition = true;
3473            break;
3474        case AA_AFSTATE_ACTIVE_SCAN:
3475            nextState = HAL_AFSTATE_SCANNING;
3476            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
3477            break;
3478        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3479            nextState = NO_TRANSITION;
3480            break;
3481        case AA_AFSTATE_AF_FAILED_FOCUS:
3482            nextState = HAL_AFSTATE_FAILED;
3483            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3484            break;
3485        default:
3486            bWrongTransition = true;
3487            break;
3488        }
3489    }
3490    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
3491        switch (noti) {
3492        case AA_AFSTATE_INACTIVE:
3493            bWrongTransition = true;
3494            break;
3495        case AA_AFSTATE_ACTIVE_SCAN:
3496            nextState = NO_TRANSITION;
3497            break;
3498        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3499            m_IsAfLockRequired = true;
3500            nextState = HAL_AFSTATE_LOCKED;
3501            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
3502            break;
3503        case AA_AFSTATE_AF_FAILED_FOCUS:
3504            m_IsAfLockRequired = true;
3505            nextState = HAL_AFSTATE_FAILED;
3506            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3507            break;
3508        default:
3509            bWrongTransition = true;
3510            break;
3511        }
3512    }
3513    else if (m_afState == HAL_AFSTATE_LOCKED) {
3514        switch (noti) {
3515            case AA_AFSTATE_INACTIVE:
3516                nextState = NO_TRANSITION;
3517                break;
3518            case AA_AFSTATE_ACTIVE_SCAN:
3519                bWrongTransition = true;
3520                break;
3521            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3522                nextState = NO_TRANSITION;
3523                break;
3524            case AA_AFSTATE_AF_FAILED_FOCUS:
3525            default:
3526                bWrongTransition = true;
3527                break;
3528        }
3529    }
3530    else if (m_afState == HAL_AFSTATE_FAILED) {
3531        switch (noti) {
3532            case AA_AFSTATE_INACTIVE:
3533            case AA_AFSTATE_ACTIVE_SCAN:
3534            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3535                bWrongTransition = true;
3536                break;
3537            case AA_AFSTATE_AF_FAILED_FOCUS:
3538                nextState = NO_TRANSITION;
3539                break;
3540            default:
3541                bWrongTransition = true;
3542                break;
3543        }
3544    }
3545    if (bWrongTransition) {
3546        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
3547        return;
3548    }
3549    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
3550    if (nextState != NO_TRANSITION)
3551        m_afState = nextState;
3552}
3553
3554void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
3555{
3556    int nextState = NO_TRANSITION;
3557    bool bWrongTransition = false;
3558
3559    if (m_afState == HAL_AFSTATE_INACTIVE) {
3560        switch (noti) {
3561        case AA_AFSTATE_INACTIVE:
3562        case AA_AFSTATE_ACTIVE_SCAN:
3563        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3564        case AA_AFSTATE_AF_FAILED_FOCUS:
3565        default:
3566            nextState = NO_TRANSITION;
3567            break;
3568        }
3569    }
3570    else if (m_afState == HAL_AFSTATE_STARTED) {
3571        switch (noti) {
3572        case AA_AFSTATE_INACTIVE:
3573            nextState = NO_TRANSITION;
3574            break;
3575        case AA_AFSTATE_ACTIVE_SCAN:
3576            nextState = HAL_AFSTATE_SCANNING;
3577            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
3578            break;
3579        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3580            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
3581            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
3582            break;
3583        case AA_AFSTATE_AF_FAILED_FOCUS:
3584            nextState = HAL_AFSTATE_FAILED;
3585            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3586            break;
3587        default:
3588            bWrongTransition = true;
3589            break;
3590        }
3591    }
3592    else if (m_afState == HAL_AFSTATE_SCANNING) {
3593        switch (noti) {
3594        case AA_AFSTATE_INACTIVE:
3595            bWrongTransition = true;
3596            break;
3597        case AA_AFSTATE_ACTIVE_SCAN:
3598            nextState = NO_TRANSITION;
3599            break;
3600        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3601            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
3602            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
3603            break;
3604        case AA_AFSTATE_AF_FAILED_FOCUS:
3605            nextState = NO_TRANSITION;
3606            break;
3607        default:
3608            bWrongTransition = true;
3609            break;
3610        }
3611    }
3612    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
3613        switch (noti) {
3614        case AA_AFSTATE_INACTIVE:
3615            bWrongTransition = true;
3616            break;
3617        case AA_AFSTATE_ACTIVE_SCAN:
3618            nextState = HAL_AFSTATE_SCANNING;
3619            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
3620            break;
3621        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3622            nextState = NO_TRANSITION;
3623            break;
3624        case AA_AFSTATE_AF_FAILED_FOCUS:
3625            nextState = HAL_AFSTATE_FAILED;
3626            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3627            break;
3628        default:
3629            bWrongTransition = true;
3630            break;
3631        }
3632    }
3633    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
3634        switch (noti) {
3635        case AA_AFSTATE_INACTIVE:
3636            bWrongTransition = true;
3637            break;
3638        case AA_AFSTATE_ACTIVE_SCAN:
3639            nextState = NO_TRANSITION;
3640            break;
3641        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3642            m_IsAfLockRequired = true;
3643            nextState = HAL_AFSTATE_LOCKED;
3644            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
3645            break;
3646        case AA_AFSTATE_AF_FAILED_FOCUS:
3647            nextState = HAL_AFSTATE_FAILED;
3648            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3649            break;
3650        default:
3651            bWrongTransition = true;
3652            break;
3653        }
3654    }
3655    else if (m_afState == HAL_AFSTATE_LOCKED) {
3656        switch (noti) {
3657            case AA_AFSTATE_INACTIVE:
3658                nextState = NO_TRANSITION;
3659                break;
3660            case AA_AFSTATE_ACTIVE_SCAN:
3661                bWrongTransition = true;
3662                break;
3663            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3664                nextState = NO_TRANSITION;
3665                break;
3666            case AA_AFSTATE_AF_FAILED_FOCUS:
3667            default:
3668                bWrongTransition = true;
3669                break;
3670        }
3671    }
3672    else if (m_afState == HAL_AFSTATE_FAILED) {
3673        switch (noti) {
3674            case AA_AFSTATE_INACTIVE:
3675            case AA_AFSTATE_ACTIVE_SCAN:
3676            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
3677                bWrongTransition = true;
3678                break;
3679            case AA_AFSTATE_AF_FAILED_FOCUS:
3680                nextState = NO_TRANSITION;
3681                break;
3682            default:
3683                bWrongTransition = true;
3684                break;
3685        }
3686    }
3687    if (bWrongTransition) {
3688        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
3689        return;
3690    }
3691    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
3692    if (nextState != NO_TRANSITION)
3693        m_afState = nextState;
3694}
3695
3696void ExynosCameraHWInterface2::OnAfCancel(int id)
3697{
3698    switch (m_afMode) {
3699    case AA_AFMODE_AUTO:
3700    case AA_AFMODE_MACRO:
3701        OnAfCancelAutoMacro(id);
3702        break;
3703    case AA_AFMODE_CONTINUOUS_VIDEO:
3704        OnAfCancelCAFVideo(id);
3705        break;
3706    case AA_AFMODE_CONTINUOUS_PICTURE:
3707        OnAfCancelCAFPicture(id);
3708        break;
3709    case AA_AFMODE_OFF:
3710    default:
3711        break;
3712    }
3713}
3714
3715void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id)
3716{
3717    int nextState = NO_TRANSITION;
3718    m_afTriggerId = id;
3719
3720    switch (m_afState) {
3721    case HAL_AFSTATE_INACTIVE:
3722        nextState = NO_TRANSITION;
3723        break;
3724    case HAL_AFSTATE_NEEDS_COMMAND:
3725    case HAL_AFSTATE_STARTED:
3726    case HAL_AFSTATE_SCANNING:
3727    case HAL_AFSTATE_LOCKED:
3728    case HAL_AFSTATE_FAILED:
3729        SetAfMode(AA_AFMODE_OFF);
3730        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
3731        nextState = HAL_AFSTATE_INACTIVE;
3732        break;
3733    default:
3734        break;
3735    }
3736    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
3737    if (nextState != NO_TRANSITION)
3738        m_afState = nextState;
3739}
3740
3741void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id)
3742{
3743    int nextState = NO_TRANSITION;
3744    m_afTriggerId = id;
3745
3746    switch (m_afState) {
3747    case HAL_AFSTATE_INACTIVE:
3748        nextState = NO_TRANSITION;
3749        break;
3750    case HAL_AFSTATE_NEEDS_COMMAND:
3751    case HAL_AFSTATE_STARTED:
3752    case HAL_AFSTATE_SCANNING:
3753    case HAL_AFSTATE_LOCKED:
3754    case HAL_AFSTATE_FAILED:
3755    case HAL_AFSTATE_NEEDS_DETERMINATION:
3756    case HAL_AFSTATE_PASSIVE_FOCUSED:
3757        SetAfMode(AA_AFMODE_OFF);
3758        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
3759        SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
3760        nextState = HAL_AFSTATE_INACTIVE;
3761        break;
3762    default:
3763        break;
3764    }
3765    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
3766    if (nextState != NO_TRANSITION)
3767        m_afState = nextState;
3768}
3769
3770void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id)
3771{
3772    int nextState = NO_TRANSITION;
3773    m_afTriggerId = id;
3774
3775    switch (m_afState) {
3776    case HAL_AFSTATE_INACTIVE:
3777        nextState = NO_TRANSITION;
3778        break;
3779    case HAL_AFSTATE_NEEDS_COMMAND:
3780    case HAL_AFSTATE_STARTED:
3781    case HAL_AFSTATE_SCANNING:
3782    case HAL_AFSTATE_LOCKED:
3783    case HAL_AFSTATE_FAILED:
3784    case HAL_AFSTATE_NEEDS_DETERMINATION:
3785    case HAL_AFSTATE_PASSIVE_FOCUSED:
3786        SetAfMode(AA_AFMODE_OFF);
3787        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
3788        SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
3789        nextState = HAL_AFSTATE_INACTIVE;
3790        break;
3791    default:
3792        break;
3793    }
3794    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
3795    if (nextState != NO_TRANSITION)
3796        m_afState = nextState;
3797}
3798
// Record the AF state reported to the camera service and push an autofocus
// notification through the framework callback.
// NOTE(review): m_serviceAfState is updated before the callback fires, so a
// reentrant GetAfStateForService() already sees the new value — keep order.
void ExynosCameraHWInterface2::SetAfStateForService(int newState)
{
    m_serviceAfState = newState;
    m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
}
3804
// Return the last AF state published to the service via SetAfStateForService().
int ExynosCameraHWInterface2::GetAfStateForService()
{
   return m_serviceAfState;
}
3809
3810void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
3811{
3812    if (m_afMode != afMode) {
3813        if (m_IsAfModeUpdateRequired) {
3814            m_afMode2 = afMode;
3815            ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
3816        }
3817        else {
3818            ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
3819            m_IsAfModeUpdateRequired = true;
3820            m_afMode = afMode;
3821        }
3822    }
3823}
3824
3825void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
3826{
3827    char property[PROPERTY_VALUE_MAX];
3828
3829    //2 0th IFD TIFF Tags
3830    //3 Maker
3831    property_get("ro.product.brand", property, EXIF_DEF_MAKER);
3832    strncpy((char *)mExifInfo.maker, property,
3833                sizeof(mExifInfo.maker) - 1);
3834    mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
3835    //3 Model
3836    property_get("ro.product.model", property, EXIF_DEF_MODEL);
3837    strncpy((char *)mExifInfo.model, property,
3838                sizeof(mExifInfo.model) - 1);
3839    mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
3840    //3 Software
3841    property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
3842    strncpy((char *)mExifInfo.software, property,
3843                sizeof(mExifInfo.software) - 1);
3844    mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
3845
3846    //3 YCbCr Positioning
3847    mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
3848
3849    //2 0th IFD Exif Private Tags
3850    //3 F Number
3851    mExifInfo.fnumber.num = EXIF_DEF_FNUMBER_NUM;
3852    mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
3853    //3 Exposure Program
3854    mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
3855    //3 Exif Version
3856    memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
3857    //3 Aperture
3858    uint32_t av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
3859    mExifInfo.aperture.num = av*EXIF_DEF_APEX_DEN;
3860    mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
3861    //3 Maximum lens aperture
3862    mExifInfo.max_aperture.num = mExifInfo.aperture.num;
3863    mExifInfo.max_aperture.den = mExifInfo.aperture.den;
3864    //3 Lens Focal Length
3865    mExifInfo.focal_length.num = EXIF_DEF_FOCAL_LEN_NUM;
3866    mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
3867    //3 User Comments
3868    strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
3869    //3 Color Space information
3870    mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
3871    //3 Exposure Mode
3872    mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
3873
3874    //2 0th IFD GPS Info Tags
3875    unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
3876    memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
3877
3878    //2 1th IFD TIFF Tags
3879    mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
3880    mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
3881    mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
3882    mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
3883    mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
3884    mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
3885}
3886
3887void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
3888	camera2_shot *currentEntry)
3889{
3890    camera2_dm *dm = &(currentEntry->dm);
3891    camera2_ctl *ctl = &(currentEntry->ctl);
3892
3893    ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime,dm->aa.isoValue );
3894    if (!ctl->request.frameCount)
3895       return;
3896    //2 0th IFD TIFF Tags
3897    //3 Width
3898    exifInfo->width = rect->w;
3899    //3 Height
3900    exifInfo->height = rect->h;
3901    //3 Orientation
3902    switch (ctl->jpeg.orientation) {
3903    case 90:
3904        exifInfo->orientation = EXIF_ORIENTATION_90;
3905        break;
3906    case 180:
3907        exifInfo->orientation = EXIF_ORIENTATION_180;
3908        break;
3909    case 270:
3910        exifInfo->orientation = EXIF_ORIENTATION_270;
3911        break;
3912    case 0:
3913    default:
3914        exifInfo->orientation = EXIF_ORIENTATION_UP;
3915        break;
3916    }
3917
3918    //3 Date time
3919    time_t rawtime;
3920    struct tm *timeinfo;
3921    time(&rawtime);
3922    timeinfo = localtime(&rawtime);
3923    strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
3924
3925    //2 0th IFD Exif Private Tags
3926    //3 Exposure Time
3927    int shutterSpeed = (dm->sensor.exposureTime/1000);
3928
3929    if (shutterSpeed < 0) {
3930        shutterSpeed = 100;
3931    }
3932
3933    exifInfo->exposure_time.num = 1;
3934    // x us -> 1/x s */
3935    //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
3936    exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
3937
3938    //3 ISO Speed Rating
3939    exifInfo->iso_speed_rating = dm->aa.isoValue;
3940
3941    uint32_t av, tv, bv, sv, ev;
3942    av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
3943    tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
3944    sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
3945    bv = av + tv - sv;
3946    ev = av + tv;
3947    //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
3948    ALOGD("AV=%d, TV=%d, SV=%d", av, tv, sv);
3949
3950    //3 Shutter Speed
3951    exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
3952    exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
3953    //3 Brightness
3954    exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
3955    exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
3956    //3 Exposure Bias
3957    if (ctl->aa.sceneMode== AA_SCENE_MODE_BEACH||
3958        ctl->aa.sceneMode== AA_SCENE_MODE_SNOW) {
3959        exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
3960        exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
3961    } else {
3962        exifInfo->exposure_bias.num = 0;
3963        exifInfo->exposure_bias.den = 0;
3964    }
3965    //3 Metering Mode
3966    /*switch (m_curCameraInfo->metering) {
3967    case METERING_MODE_CENTER:
3968        exifInfo->metering_mode = EXIF_METERING_CENTER;
3969        break;
3970    case METERING_MODE_MATRIX:
3971        exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
3972        break;
3973    case METERING_MODE_SPOT:
3974        exifInfo->metering_mode = EXIF_METERING_SPOT;
3975        break;
3976    case METERING_MODE_AVERAGE:
3977    default:
3978        exifInfo->metering_mode = EXIF_METERING_AVERAGE;
3979        break;
3980    }*/
3981    exifInfo->metering_mode = EXIF_METERING_CENTER;
3982
3983    //3 Flash
3984    int flash = dm->flash.flashMode;
3985    if (dm->flash.flashMode == FLASH_MODE_OFF || flash < 0)
3986        exifInfo->flash = EXIF_DEF_FLASH;
3987    else
3988        exifInfo->flash = flash;
3989
3990    //3 White Balance
3991    if (dm->aa.awbMode == AA_AWBMODE_WB_AUTO)
3992        exifInfo->white_balance = EXIF_WB_AUTO;
3993    else
3994        exifInfo->white_balance = EXIF_WB_MANUAL;
3995
3996    //3 Scene Capture Type
3997    switch (ctl->aa.sceneMode) {
3998    case AA_SCENE_MODE_PORTRAIT:
3999        exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
4000        break;
4001    case AA_SCENE_MODE_LANDSCAPE:
4002        exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
4003        break;
4004    case AA_SCENE_MODE_NIGHT_PORTRAIT:
4005        exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
4006        break;
4007    default:
4008        exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
4009        break;
4010    }
4011
4012    //2 0th IFD GPS Info Tags
4013    if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
4014
4015        if (ctl->jpeg.gpsCoordinates[0] > 0)
4016            strcpy((char *)exifInfo->gps_latitude_ref, "N");
4017        else
4018            strcpy((char *)exifInfo->gps_latitude_ref, "S");
4019
4020        if (ctl->jpeg.gpsCoordinates[1] > 0)
4021            strcpy((char *)exifInfo->gps_longitude_ref, "E");
4022        else
4023            strcpy((char *)exifInfo->gps_longitude_ref, "W");
4024
4025        if (ctl->jpeg.gpsCoordinates[2] > 0)
4026            exifInfo->gps_altitude_ref = 0;
4027        else
4028            exifInfo->gps_altitude_ref = 1;
4029
4030        double latitude = fabs(ctl->jpeg.gpsCoordinates[0] / 10000.0);
4031        double longitude = fabs(ctl->jpeg.gpsCoordinates[1] / 10000.0);
4032        double altitude = fabs(ctl->jpeg.gpsCoordinates[2] / 100.0);
4033
4034        exifInfo->gps_latitude[0].num = (uint32_t)latitude;
4035        exifInfo->gps_latitude[0].den = 1;
4036        exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
4037        exifInfo->gps_latitude[1].den = 1;
4038        exifInfo->gps_latitude[2].num = (uint32_t)((((latitude - exifInfo->gps_latitude[0].num) * 60)
4039                                        - exifInfo->gps_latitude[1].num) * 60);
4040        exifInfo->gps_latitude[2].den = 1;
4041
4042        exifInfo->gps_longitude[0].num = (uint32_t)longitude;
4043        exifInfo->gps_longitude[0].den = 1;
4044        exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
4045        exifInfo->gps_longitude[1].den = 1;
4046        exifInfo->gps_longitude[2].num = (uint32_t)((((longitude - exifInfo->gps_longitude[0].num) * 60)
4047                                        - exifInfo->gps_longitude[1].num) * 60);
4048        exifInfo->gps_longitude[2].den = 1;
4049
4050        exifInfo->gps_altitude.num = (uint32_t)altitude;
4051        exifInfo->gps_altitude.den = 1;
4052
4053        struct tm tm_data;
4054        long timestamp;
4055        timestamp = (long)ctl->jpeg.gpsTimestamp;
4056        gmtime_r(&timestamp, &tm_data);
4057        exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
4058        exifInfo->gps_timestamp[0].den = 1;
4059        exifInfo->gps_timestamp[1].num = tm_data.tm_min;
4060        exifInfo->gps_timestamp[1].den = 1;
4061        exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
4062        exifInfo->gps_timestamp[2].den = 1;
4063        snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
4064                "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
4065
4066        exifInfo->enableGps = true;
4067    } else {
4068        exifInfo->enableGps = false;
4069    }
4070
4071    //2 1th IFD TIFF Tags
4072    exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
4073    exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
4074}
4075
4076ExynosCameraHWInterface2::MainThread::~MainThread()
4077{
4078    ALOGV("(%s):", __FUNCTION__);
4079}
4080
4081void ExynosCameraHWInterface2::MainThread::release()
4082{
4083    ALOGV("(%s):", __func__);
4084    SetSignal(SIGNAL_THREAD_RELEASE);
4085}
4086
4087ExynosCameraHWInterface2::SensorThread::~SensorThread()
4088{
4089    ALOGV("(%s):", __FUNCTION__);
4090}
4091
4092void ExynosCameraHWInterface2::SensorThread::release()
4093{
4094    ALOGV("(%s):", __func__);
4095    SetSignal(SIGNAL_THREAD_RELEASE);
4096}
4097
4098ExynosCameraHWInterface2::IspThread::~IspThread()
4099{
4100    ALOGV("(%s):", __FUNCTION__);
4101}
4102
4103void ExynosCameraHWInterface2::IspThread::release()
4104{
4105    ALOGV("(%s):", __func__);
4106    SetSignal(SIGNAL_THREAD_RELEASE);
4107}
4108
4109ExynosCameraHWInterface2::StreamThread::~StreamThread()
4110{
4111    ALOGV("(%s):", __FUNCTION__);
4112}
4113
// Publish new stream parameters to the thread. The pointer is stashed in
// m_tempParameters and the thread copies it into its active set when it
// services SIGNAL_STREAM_CHANGE_PARAMETER (see applyChange()); the usleep
// is a crude wait for that to happen before the caller proceeds.
void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);

    // NOTE(review): new_parameters must stay valid until the thread has
    // copied it — the 2ms sleep below is the only synchronization.
    m_tempParameters = new_parameters;

    SetSignal(SIGNAL_STREAM_CHANGE_PARAMETER);

    // TODO : return synchronously (after setting parameters asynchronously)
    usleep(2000);
}
4125
4126void ExynosCameraHWInterface2::StreamThread::applyChange()
4127{
4128    memcpy(&m_parameters, m_tempParameters, sizeof(stream_parameters_t));
4129
4130    ALOGV("DEBUG(%s):  Applying Stream paremeters  width(%d), height(%d)",
4131            __FUNCTION__, m_parameters.outputWidth, m_parameters.outputHeight);
4132}
4133
4134void ExynosCameraHWInterface2::StreamThread::release()
4135{
4136    ALOGV("(%s):", __func__);
4137    SetSignal(SIGNAL_THREAD_RELEASE);
4138}
4139
4140int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
4141{
4142    int index;
4143    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
4144        if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
4145            return index;
4146    }
4147    return -1;
4148}
4149
4150void ExynosCameraHWInterface2::StreamThread::setRecordingParameter(record_parameters_t * recordParm)
4151{
4152    memcpy(&m_recordParameters, recordParm, sizeof(record_parameters_t));
4153}
4154
4155int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
4156{
4157    if (ionClient == 0) {
4158        ionClient = ion_client_create();
4159        if (ionClient < 0) {
4160            ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
4161            return 0;
4162        }
4163    }
4164
4165    return ionClient;
4166}
4167
4168int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
4169{
4170    if (ionClient != 0) {
4171        if (ionClient > 0) {
4172            ion_client_destroy(ionClient);
4173        }
4174        ionClient = 0;
4175    }
4176
4177    return ionClient;
4178}
4179
// Allocate and mmap up to iMemoryNum planes of buf via ION.
// A plane with size.extS[i] == 0 terminates the loop early (unused planes).
// On any failure, every plane allocated so far is rolled back via
// freeCameraMemory() and -1 is returned; returns 0 on success.
int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
{
    int ret = 0;
    int i = 0;

    if (ionClient == 0) {
        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
        return -1;
    }

    for (i=0;i<iMemoryNum;i++) {
        // Zero size marks the end of the used planes.
        if (buf->size.extS[i] == 0) {
            break;
        }

        buf->fd.extFd[i] = ion_alloc(ionClient, \
                                      buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK,0);
        if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
            // Normalize to -1 so freeCameraMemory() skips this plane's fd.
            buf->fd.extFd[i] = -1;
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }

        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
                                        buf->size.extS[i], 0);
        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
            // Mark unmapped so freeCameraMemory() won't try to ion_unmap it.
            buf->virt.extP[i] = (char *)MAP_FAILED;
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }
        ALOGV("allocCameraMem : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
    }

    return ret;
}
4217
4218void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
4219{
4220
4221    int i =0 ;
4222    int ret = 0;
4223
4224    for (i=0;i<iMemoryNum;i++) {
4225        if (buf->fd.extFd[i] != -1) {
4226            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
4227                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
4228                if (ret < 0)
4229                    ALOGE("ERR(%s)", __FUNCTION__);
4230            }
4231            ion_free(buf->fd.extFd[i]);
4232        }
4233        buf->fd.extFd[i] = -1;
4234        buf->virt.extP[i] = (char *)MAP_FAILED;
4235        buf->size.extS[i] = 0;
4236    }
4237}
4238
4239void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
4240{
4241    int i =0 ;
4242    for (i=0;i<iMemoryNum;i++) {
4243        buf->virt.extP[i] = (char *)MAP_FAILED;
4244        buf->fd.extFd[i] = -1;
4245        buf->size.extS[i] = 0;
4246    }
4247}
4248
4249
4250
4251
// Singleton device instance: only one camera may be open at a time.
static camera2_device_t *g_cam2_device = NULL;
// True while the open device may be used (the "vaild" typo is part of the
// identifier throughout the file and is intentionally preserved).
static bool g_camera_vaild = false;
// Per-camera static info objects, created lazily by HAL2_getCameraInfo().
ExynosCamera2 * g_camera2[2] = { NULL, NULL };
4255
// hw_device_t::close hook: tear down the HAL instance, release the global
// device singleton, and drop both cached ExynosCamera2 info objects.
static int HAL2_camera_device_close(struct hw_device_t* device)
{
    ALOGV("%s: ENTER", __FUNCTION__);
    if (device) {

        camera2_device_t *cam_device = (camera2_device_t *)device;
        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
        // Destroy the HAL object before freeing its owning device struct.
        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
        // Clearing g_cam2_device also releases the busy-wait loop in
        // HAL2_camera_device_open() for a pending open of the other camera.
        g_cam2_device = NULL;
        free(cam_device);
        g_camera_vaild = false;
    }
    if (g_camera2[0] != NULL) {
        delete static_cast<ExynosCamera2 *>(g_camera2[0]);
        g_camera2[0] = NULL;
    }

    if (g_camera2[1] != NULL) {
        delete static_cast<ExynosCamera2 *>(g_camera2[1]);
        g_camera2[1] = NULL;
    }

    ALOGV("%s: EXIT", __FUNCTION__);
    return 0;
}
4282
4283static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
4284{
4285    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
4286}
4287
4288static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
4289            const camera2_request_queue_src_ops_t *request_src_ops)
4290{
4291    ALOGV("DEBUG(%s):", __FUNCTION__);
4292    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
4293}
4294
4295static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
4296{
4297    ALOGV("DEBUG(%s):", __FUNCTION__);
4298    return obj(dev)->notifyRequestQueueNotEmpty();
4299}
4300
4301static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
4302            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
4303{
4304    ALOGV("DEBUG(%s):", __FUNCTION__);
4305    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
4306}
4307
4308static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
4309{
4310    ALOGV("DEBUG(%s):", __FUNCTION__);
4311    return obj(dev)->getInProgressCount();
4312}
4313
4314static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
4315{
4316    ALOGV("DEBUG(%s):", __FUNCTION__);
4317    return obj(dev)->flushCapturesInProgress();
4318}
4319
4320static int HAL2_device_construct_default_request(const struct camera2_device *dev,
4321            int request_template, camera_metadata_t **request)
4322{
4323    ALOGV("DEBUG(%s):", __FUNCTION__);
4324    return obj(dev)->constructDefaultRequest(request_template, request);
4325}
4326
4327static int HAL2_device_allocate_stream(
4328            const struct camera2_device *dev,
4329            // inputs
4330            uint32_t width,
4331            uint32_t height,
4332            int      format,
4333            const camera2_stream_ops_t *stream_ops,
4334            // outputs
4335            uint32_t *stream_id,
4336            uint32_t *format_actual,
4337            uint32_t *usage,
4338            uint32_t *max_buffers)
4339{
4340    ALOGV("(%s): ", __FUNCTION__);
4341    return obj(dev)->allocateStream(width, height, format, stream_ops,
4342                                    stream_id, format_actual, usage, max_buffers);
4343}
4344
4345
4346static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
4347            uint32_t stream_id,
4348            int num_buffers,
4349            buffer_handle_t *buffers)
4350{
4351    ALOGV("DEBUG(%s):", __FUNCTION__);
4352    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
4353}
4354
4355static int HAL2_device_release_stream(
4356        const struct camera2_device *dev,
4357            uint32_t stream_id)
4358{
4359    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
4360    if (!g_camera_vaild)
4361        return 0;
4362    return obj(dev)->releaseStream(stream_id);
4363}
4364
4365static int HAL2_device_allocate_reprocess_stream(
4366           const struct camera2_device *dev,
4367            uint32_t width,
4368            uint32_t height,
4369            uint32_t format,
4370            const camera2_stream_in_ops_t *reprocess_stream_ops,
4371            // outputs
4372            uint32_t *stream_id,
4373            uint32_t *consumer_usage,
4374            uint32_t *max_buffers)
4375{
4376    ALOGV("DEBUG(%s):", __FUNCTION__);
4377    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
4378                                    stream_id, consumer_usage, max_buffers);
4379}
4380
4381static int HAL2_device_release_reprocess_stream(
4382        const struct camera2_device *dev,
4383            uint32_t stream_id)
4384{
4385    ALOGV("DEBUG(%s):", __FUNCTION__);
4386    return obj(dev)->releaseReprocessStream(stream_id);
4387}
4388
4389static int HAL2_device_trigger_action(const struct camera2_device *dev,
4390           uint32_t trigger_id,
4391            int ext1,
4392            int ext2)
4393{
4394    ALOGV("DEBUG(%s):", __FUNCTION__);
4395    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
4396}
4397
4398static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
4399            camera2_notify_callback notify_cb,
4400            void *user)
4401{
4402    ALOGV("DEBUG(%s):", __FUNCTION__);
4403    return obj(dev)->setNotifyCallback(notify_cb, user);
4404}
4405
4406static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
4407            vendor_tag_query_ops_t **ops)
4408{
4409    ALOGV("DEBUG(%s):", __FUNCTION__);
4410    return obj(dev)->getMetadataVendorTagOps(ops);
4411}
4412
4413static int HAL2_device_dump(const struct camera2_device *dev, int fd)
4414{
4415    ALOGV("DEBUG(%s):", __FUNCTION__);
4416    return obj(dev)->dump(fd);
4417}
4418
4419
4420
4421
4422
4423static int HAL2_getNumberOfCameras()
4424{
4425    ALOGV("(%s): returning 2", __FUNCTION__);
4426    return 2;
4427}
4428
4429
// camera_module_t::get_camera_info hook. Lazily creates the per-camera
// ExynosCamera2 helper and caches its static metadata in a function-local
// static, so the (expensive) construction happens only on the first query.
static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
{
    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
    // Cached static metadata per camera; built once and reused for the
    // process lifetime (never freed — owned by this cache).
    static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};

    status_t res;

    if (cameraId == 0) {
        info->facing = CAMERA_FACING_BACK;
        if (!g_camera2[0])
            g_camera2[0] = new ExynosCamera2(0);
    }
    else if (cameraId == 1) {
        info->facing = CAMERA_FACING_FRONT;
        if (!g_camera2[1])
            g_camera2[1] = new ExynosCamera2(1);
    }
    else
        return BAD_VALUE;

    info->orientation = 0;
    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
    if (mCameraInfo[cameraId] == NULL) {
        // Two-pass protocol: presumably the first call (true) sizes and
        // allocates the metadata buffer and the second (false) fills it —
        // TODO(review): confirm against ExynosCamera2::constructStaticInfo.
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
        if (res != OK) {
            ALOGE("%s: Unable to allocate static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
        if (res != OK) {
            ALOGE("%s: Unable to fill in static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }
    info->static_camera_characteristics = mCameraInfo[cameraId];
    return NO_ERROR;
}
4469
// Expands to the designated initializer "m : HAL2_device_m", wiring each
// ops slot to the matching HAL2_device_* trampoline defined above.
#define SET_METHOD(m) m : HAL2_device_##m

// camera2 API dispatch table handed to the framework via g_cam2_device->ops.
static camera2_device_ops_t camera2_device_ops = {
        SET_METHOD(set_request_queue_src_ops),
        SET_METHOD(notify_request_queue_not_empty),
        SET_METHOD(set_frame_queue_dst_ops),
        SET_METHOD(get_in_progress_count),
        SET_METHOD(flush_captures_in_progress),
        SET_METHOD(construct_default_request),
        SET_METHOD(allocate_stream),
        SET_METHOD(register_stream_buffers),
        SET_METHOD(release_stream),
        SET_METHOD(allocate_reprocess_stream),
        SET_METHOD(release_reprocess_stream),
        SET_METHOD(trigger_action),
        SET_METHOD(set_notify_callback),
        SET_METHOD(get_metadata_vendor_tag_ops),
        SET_METHOD(dump),
};

#undef SET_METHOD
4491
4492
// hw_module_methods_t::open hook. Allocates the singleton camera2_device_t,
// wires up the ops table, and constructs the HAL instance for the requested
// camera. If the same camera is already open, the existing device is
// returned; if the other camera is open, this call busy-waits until it is
// closed (g_cam2_device cleared by HAL2_camera_device_close).
static int HAL2_camera_device_open(const struct hw_module_t* module,
                                  const char *id,
                                  struct hw_device_t** device)
{


    int cameraId = atoi(id);

    // Invalidate during the transition; set true again once open completes.
    g_camera_vaild = false;
    ALOGV("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
        return -EINVAL;
    }

    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
    if (g_cam2_device) {
        if (obj(g_cam2_device)->getCameraId() == cameraId) {
            ALOGV("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
            goto done;
        } else {

            // NOTE(review): unbounded busy-wait for the other camera to be
            // closed; no synchronization guards g_cam2_device — verify no
            // concurrent open/close can race here.
            while (g_cam2_device)
                usleep(10000);
        }
    }

    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);

    if (!g_cam2_device)
        return -ENOMEM;

    g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
    g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
    g_cam2_device->common.close   = HAL2_camera_device_close;

    g_cam2_device->ops = &camera2_device_ops;

    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);

    // g_camera2[cameraId] was created by HAL2_getCameraInfo(); the HAL
    // instance is destroyed in HAL2_camera_device_close().
    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId]);

done:
    *device = (hw_device_t *)g_cam2_device;
    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
    g_camera_vaild = true;

    return 0;
}
4544
4545
// open() entry point invoked by the Android HAL loader.
static hw_module_methods_t camera_module_methods = {
            open : HAL2_camera_device_open
};
4549
extern "C" {
    // Module descriptor resolved by the Android HAL loader (dlsym on
    // HAL_MODULE_INFO_SYM); advertises the camera2 module entry points.
    struct camera_module HAL_MODULE_INFO_SYM = {
      common : {
          tag                : HARDWARE_MODULE_TAG,
          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
          hal_api_version    : HARDWARE_HAL_API_VERSION,
          id                 : CAMERA_HARDWARE_MODULE_ID,
          name               : "Exynos Camera HAL2",
          author             : "Samsung Corporation",
          methods            : &camera_module_methods,
          dso:                NULL,
          reserved:           {0},
      },
      get_number_of_cameras : HAL2_getNumberOfCameras,
      get_camera_info       : HAL2_getCameraInfo
    };
}
4567
4568}; // namespace android
4569