/* ExynosCameraHWInterface2.cpp (revision fd2d78a2d270f85a71578423e1f2b4f99fd99216) */
/*
**
** Copyright 2008, The Android Open Source Project
** Copyright 2012, Samsung Electronics Co. LTD
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
**     http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/

/*!
 * \file      ExynosCameraHWInterface2.cpp
 * \brief     source file for Android Camera API 2.0 HAL
 * \author    Sungjoong Kang(sj3.kang@samsung.com)
 * \date      2012/07/10
 *
 * <b>Revision History: </b>
 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
 *   Initial Release
 *
 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
 *   2nd Release
 *
 */
33
34//#define LOG_NDEBUG 0
35#define LOG_TAG "ExynosCameraHAL2"
36#include <utils/Log.h>
37
38#include "ExynosCameraHWInterface2.h"
39#include "exynos_format.h"
40
41
42
43namespace android {
44
45
46void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
47{
48    int nw;
49    int cnt = 0;
50    uint32_t written = 0;
51
52    ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
53    int fd = open(fname, O_RDWR | O_CREAT, 0644);
54    if (fd < 0) {
55        ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
56        return;
57    }
58
59    ALOGV("writing %d bytes to file [%s]", size, fname);
60    while (written < size) {
61        nw = ::write(fd, buf + written, size - written);
62        if (nw < 0) {
63            ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
64            break;
65        }
66        written += nw;
67        cnt++;
68    }
69    ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
70    ::close(fd);
71}
72
73int get_pixel_depth(uint32_t fmt)
74{
75    int depth = 0;
76
77    switch (fmt) {
78    case V4L2_PIX_FMT_JPEG:
79        depth = 8;
80        break;
81
82    case V4L2_PIX_FMT_NV12:
83    case V4L2_PIX_FMT_NV21:
84    case V4L2_PIX_FMT_YUV420:
85    case V4L2_PIX_FMT_YVU420M:
86    case V4L2_PIX_FMT_NV12M:
87    case V4L2_PIX_FMT_NV12MT:
88        depth = 12;
89        break;
90
91    case V4L2_PIX_FMT_RGB565:
92    case V4L2_PIX_FMT_YUYV:
93    case V4L2_PIX_FMT_YVYU:
94    case V4L2_PIX_FMT_UYVY:
95    case V4L2_PIX_FMT_VYUY:
96    case V4L2_PIX_FMT_NV16:
97    case V4L2_PIX_FMT_NV61:
98    case V4L2_PIX_FMT_YUV422P:
99    case V4L2_PIX_FMT_SBGGR10:
100    case V4L2_PIX_FMT_SBGGR12:
101    case V4L2_PIX_FMT_SBGGR16:
102        depth = 16;
103        break;
104
105    case V4L2_PIX_FMT_RGB32:
106        depth = 32;
107        break;
108    default:
109        ALOGE("Get depth failed(format : %d)", fmt);
110        break;
111    }
112
113    return depth;
114}
115
116int cam_int_s_fmt(node_info_t *node)
117{
118    struct v4l2_format v4l2_fmt;
119    unsigned int framesize;
120    int ret;
121
122    memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
123
124    v4l2_fmt.type = node->type;
125    framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
126
127    if (node->planes >= 1) {
128        v4l2_fmt.fmt.pix_mp.width       = node->width;
129        v4l2_fmt.fmt.pix_mp.height      = node->height;
130        v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
131        v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
132    } else {
133        ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__);
134    }
135
136    /* Set up for capture */
137    ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
138
139    if (ret < 0)
140        ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
141
142
143    return ret;
144}
145
146int cam_int_reqbufs(node_info_t *node)
147{
148    struct v4l2_requestbuffers req;
149    int ret;
150
151    req.count = node->buffers;
152    req.type = node->type;
153    req.memory = node->memory;
154
155    ret = exynos_v4l2_reqbufs(node->fd, &req);
156
157    if (ret < 0)
158        ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
159
160    return req.count;
161}
162
163int cam_int_qbuf(node_info_t *node, int index)
164{
165    struct v4l2_buffer v4l2_buf;
166    struct v4l2_plane planes[VIDEO_MAX_PLANES];
167    int i;
168    int ret = 0;
169
170    v4l2_buf.m.planes   = planes;
171    v4l2_buf.type       = node->type;
172    v4l2_buf.memory     = node->memory;
173    v4l2_buf.index      = index;
174    v4l2_buf.length     = node->planes;
175
176    for(i = 0; i < node->planes; i++){
177        v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
178        v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
179    }
180
181    ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
182
183    if (ret < 0)
184        ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
185
186    return ret;
187}
188
189int cam_int_streamon(node_info_t *node)
190{
191    enum v4l2_buf_type type = node->type;
192    int ret;
193
194
195    ret = exynos_v4l2_streamon(node->fd, type);
196
197    if (ret < 0)
198        ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
199
200    ALOGV("On streaming I/O... ... fd(%d)", node->fd);
201
202    return ret;
203}
204
205int cam_int_streamoff(node_info_t *node)
206{
207    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
208    int ret;
209
210
211    ALOGV("Off streaming I/O... fd(%d)", node->fd);
212    ret = exynos_v4l2_streamoff(node->fd, type);
213
214    if (ret < 0)
215        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
216
217    return ret;
218}
219
220int isp_int_streamoff(node_info_t *node)
221{
222    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
223    int ret;
224
225    ALOGV("Off streaming I/O... fd(%d)", node->fd);
226    ret = exynos_v4l2_streamoff(node->fd, type);
227
228    if (ret < 0)
229        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
230
231    return ret;
232}
233
234int cam_int_dqbuf(node_info_t *node)
235{
236    struct v4l2_buffer v4l2_buf;
237    struct v4l2_plane planes[VIDEO_MAX_PLANES];
238    int ret;
239
240    v4l2_buf.type       = node->type;
241    v4l2_buf.memory     = node->memory;
242    v4l2_buf.m.planes   = planes;
243    v4l2_buf.length     = node->planes;
244
245    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
246    if (ret < 0)
247        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
248
249    return v4l2_buf.index;
250}
251
252int cam_int_s_input(node_info_t *node, int index)
253{
254    int ret;
255
256    ret = exynos_v4l2_s_input(node->fd, index);
257    if (ret < 0)
258        ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
259
260    return ret;
261}
262
263
264gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
265
266RequestManager::RequestManager(SignalDrivenThread* main_thread):
267    m_numOfEntries(0),
268    m_entryInsertionIndex(-1),
269    m_entryProcessingIndex(-1),
270    m_entryFrameOutputIndex(-1),
271    m_lastAeMode(0),
272    m_lastAaMode(0),
273    m_lastAwbMode(0),
274    m_lastAeComp(0),
275    m_frameIndex(-1)
276{
277    m_metadataConverter = new MetadataConverter;
278    m_mainThread = main_thread;
279    for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
280        memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
281        entries[i].internal_shot.shot.ctl.request.frameCount = -1;
282    }
283    m_sensorPipelineSkipCnt = 0;
284    return;
285}
286
287RequestManager::~RequestManager()
288{
289    ALOGV("%s", __FUNCTION__);
290    if (m_metadataConverter != NULL) {
291        delete m_metadataConverter;
292        m_metadataConverter = NULL;
293    }
294
295    return;
296}
297
298int RequestManager::GetNumEntries()
299{
300    return m_numOfEntries;
301}
302
303void RequestManager::SetDefaultParameters(int cropX)
304{
305    m_cropX = cropX;
306}
307
308bool RequestManager::IsRequestQueueFull()
309{
310    Mutex::Autolock lock(m_requestMutex);
311    if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
312        return true;
313    else
314        return false;
315}
316
317void RequestManager::RegisterRequest(camera_metadata_t * new_request)
318{
319    ALOGV("DEBUG(%s):", __FUNCTION__);
320
321    Mutex::Autolock lock(m_requestMutex);
322
323    request_manager_entry * newEntry = NULL;
324    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
325    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex,m_numOfEntries );
326
327
328    newEntry = &(entries[newInsertionIndex]);
329
330    if (newEntry->status!=EMPTY) {
331        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
332        return;
333    }
334    newEntry->status = REGISTERED;
335    newEntry->original_request = new_request;
336    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
337    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
338    newEntry->output_stream_count = newEntry->internal_shot.shot.ctl.request.outputStreams[15];
339
340    m_numOfEntries++;
341    m_entryInsertionIndex = newInsertionIndex;
342
343
344    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
345    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
346}
347
348void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
349{
350    ALOGV("DEBUG(%s):", __FUNCTION__);
351    int frame_index;
352    request_manager_entry * currentEntry;
353
354    Mutex::Autolock lock(m_requestMutex);
355
356    frame_index = GetFrameIndex();
357    currentEntry =  &(entries[frame_index]);
358    if (currentEntry->status != CAPTURED) {
359        ALOGV("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__
360        , m_entryProcessingIndex, m_entryFrameOutputIndex,(int)(currentEntry->status));
361        return;
362    }
363    if (deregistered_request)  *deregistered_request = currentEntry->original_request;
364
365    currentEntry->status = EMPTY;
366    currentEntry->original_request = NULL;
367    memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
368    currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
369    currentEntry->output_stream_count = 0;
370    currentEntry->dynamic_meta_vaild = false;
371    m_numOfEntries--;
372    ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
373     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
374
375    return;
376}
377
378bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
379                camera_metadata_t ** prepared_frame, int afState)
380{
381    ALOGV("DEBUG(%s):", __FUNCTION__);
382    Mutex::Autolock lock(m_requestMutex);
383    status_t res = NO_ERROR;
384    int tempFrameOutputIndex = GetFrameIndex();
385    request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
386    ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
387        m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
388
389    if (currentEntry->status != CAPTURED) {
390        ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
391
392        return false;
393    }
394    m_entryFrameOutputIndex = tempFrameOutputIndex;
395    m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 20, 500); //estimated
396    add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
397    res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
398                m_tempFrameMetadata);
399    if (res!=NO_ERROR) {
400        ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
401        return false;
402    }
403    *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
404    *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
405    *prepared_frame = m_tempFrameMetadata;
406    ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d)", m_entryFrameOutputIndex,
407        currentEntry->internal_shot.shot.ctl.request.frameCount);
408    // Dump();
409    return true;
410}
411
// Advances the processing pointer to the next REGISTERED entry, marks it
// REQUESTED, and fills the driver-facing shot metadata (embedded in
// buf->virt.extP[1]) for that request. Reports the request's AF mode via
// *afMode. Returns the new processing index, or -1 when the queue is empty,
// underrun, or the target entry is in an unexpected state.
int RequestManager::MarkProcessingRequest(ExynosBuffer* buf, int *afMode)
{

    Mutex::Autolock lock(m_requestMutex);
    struct camera2_shot_ext * shot_ext;
    struct camera2_shot_ext * request_shot;
    int targetStreamIndex = 0;
    request_manager_entry * newEntry = NULL;
    // Persistent across calls: only the very first request after load sends
    // AA_CONTROL_AUTO; all later ones send AA_CONTROL_NONE (see bottom).
    static int count = 0;

    if (m_numOfEntries == 0)  {
        ALOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
        return -1;
    }

    // Underrun: insertion has not advanced beyond the entry currently being
    // processed, so there is nothing new to hand to the sensor yet.
    if ((m_entryProcessingIndex == m_entryInsertionIndex)
        && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
        ALOGD("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
         m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
        return -1;
    }

    int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
    ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);

    newEntry = &(entries[newProcessingIndex]);
    request_shot = &(newEntry->internal_shot);
    *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
    if (newEntry->status != REGISTERED) {
        ALOGD("DEBUG(%s)(%d): Circular buffer abnormal ", __FUNCTION__, newProcessingIndex);
        return -1;
    }

    newEntry->status = REQUESTED;

    // The shot metadata lives in the second plane of the bayer buffer.
    shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];

    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
    shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
    shot_ext->request_sensor = 1;
    // Bypass all optional ISP stages by default; face detection (fd_bypass)
    // may be re-enabled per-stream below.
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    shot_ext->fd_bypass = 1;
    shot_ext->setfile = 0;

    // Translate the request's output stream list into scaler requests:
    // 0 = preview (scalerP), 1 = capture (scalerC), 2 = record (scalerP).
    for (int i = 0; i < newEntry->output_stream_count; i++) {
        targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];

        if (targetStreamIndex==0) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
            shot_ext->request_scp = 1;
            if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
                shot_ext->fd_bypass = 0;
        }
        else if (targetStreamIndex == 1) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
            shot_ext->request_scc = 1;
        }
        else if (targetStreamIndex == 2) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (record)", __FUNCTION__, i);
            shot_ext->request_scp = 1;
            shot_ext->shot.ctl.request.outputStreams[2] = 1;
            if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
                shot_ext->fd_bypass = 0;
        }
        else {
            ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
        }
    }

    if (count == 0){
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
    } else
        shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;

    count++;
    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
    shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
    shot_ext->shot.magicNumber = 0x23456789;
    // 0 = let the ISP/sensor choose exposure and sensitivity automatically.
    shot_ext->shot.ctl.sensor.exposureTime = 0;
    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;    // ~30 fps in ns
    shot_ext->shot.ctl.sensor.sensitivity = 0;

    shot_ext->shot.ctl.scaler.cropRegion[0] = 0;
    shot_ext->shot.ctl.scaler.cropRegion[1] = 0;
    shot_ext->shot.ctl.scaler.cropRegion[2] = m_cropX;

    m_entryProcessingIndex = newProcessingIndex;
    return newProcessingIndex;
}
502
503void RequestManager::NotifyStreamOutput(int frameCnt, int stream_id)
504{
505    int index;
506
507    ALOGV("DEBUG(%s): frameCnt(%d), stream_id(%d)", __FUNCTION__, frameCnt, stream_id);
508
509    index = FindEntryIndexByFrameCnt(frameCnt);
510    if (index == -1) {
511        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
512        return;
513    }
514    ALOGV("DEBUG(%s): frameCnt(%d), stream_id(%d) last cnt (%d)", __FUNCTION__, frameCnt, stream_id,  entries[index].output_stream_count);
515
516    entries[index].output_stream_count--;  //TODO : match stream id also
517    CheckCompleted(index);
518    return;
519}
520
521void RequestManager::CheckCompleted(int index)
522{
523    ALOGV("DEBUG(%s): reqIndex(%d) current Count(%d)", __FUNCTION__, index, entries[index].output_stream_count);
524    SetFrameIndex(index);
525    m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
526    return;
527}
528
529void RequestManager::SetFrameIndex(int index)
530{
531    Mutex::Autolock lock(m_requestMutex);
532    m_frameIndex = index;
533}
534
// Returns the current frame slot index.
// NOTE(review): deliberately reads m_frameIndex without taking
// m_requestMutex — callers such as DeregisterRequest and PrepareFrame
// already hold the (non-recursive) lock, so locking here would deadlock.
// Confirm that the remaining lock-free callers can tolerate a racy read.
int RequestManager::GetFrameIndex()
{
    return m_frameIndex;
}
539
540void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
541{
542    int index;
543    struct camera2_shot_ext * request_shot;
544    nsecs_t timeStamp;
545    int i;
546
547    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
548
549    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
550        if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
551            && (entries[i].status == CAPTURED))
552            break;
553    }
554
555    if (i == NUM_MAX_REQUEST_MGR_ENTRY){
556        ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
557        return;
558    }
559
560    request_manager_entry * newEntry = &(entries[i]);
561    request_shot = &(newEntry->internal_shot);
562
563    newEntry->dynamic_meta_vaild = true;
564    timeStamp = request_shot->shot.dm.sensor.timeStamp;
565    memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
566    request_shot->shot.dm.sensor.timeStamp = timeStamp;
567    CheckCompleted(i);
568}
569
570void RequestManager::DumpInfoWithIndex(int index)
571{
572    struct camera2_shot_ext * currMetadata = &(entries[index].internal_shot);
573
574    ALOGV("####   frameCount(%d) exposureTime(%lld) ISO(%d)",
575        currMetadata->shot.ctl.request.frameCount,
576        currMetadata->shot.ctl.sensor.exposureTime,
577        currMetadata->shot.ctl.sensor.sensitivity);
578    if (currMetadata->shot.ctl.request.outputStreams[15] == 0)
579        ALOGV("####   No output stream selected");
580    else if (currMetadata->shot.ctl.request.outputStreams[15] == 1)
581        ALOGV("####   OutputStreamId : %d", currMetadata->shot.ctl.request.outputStreams[0]);
582    else if (currMetadata->shot.ctl.request.outputStreams[15] == 2)
583        ALOGV("####   OutputStreamId : %d, %d", currMetadata->shot.ctl.request.outputStreams[0],
584            currMetadata->shot.ctl.request.outputStreams[1]);
585    else
586        ALOGV("####   OutputStream num (%d) abnormal ", currMetadata->shot.ctl.request.outputStreams[15]);
587}
588
// Refreshes the ISP-bound shot metadata with the controls stored for
// request 'frameCnt'. 3A controls (aa.mode, aeMode, awbMode, aeExpComp)
// are delta-suppressed: a value equal to the last one actually sent is
// replaced by 0 ("no change") so the ISP only reacts to real transitions.
// Also re-derives scaler requests (scc/scp) from the request's output
// stream list and optionally raises an AF trigger.
void    RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, bool afTrigger)
{
    int index, targetStreamIndex;
    struct camera2_shot_ext * request_shot;

    ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
    if (frameCnt < 0)
        return;

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * newEntry = &(entries[index]);
    request_shot = &(newEntry->internal_shot);
    // start from the stored per-request controls, then adjust below
    memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
    shot_ext->request_sensor = 1;
    // bypass optional ISP stages by default; fd_bypass may be cleared below
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    shot_ext->fd_bypass = 1;
    shot_ext->setfile = 0;

    shot_ext->request_scc = 0;
    shot_ext->request_scp = 0;

    shot_ext->shot.ctl.request.outputStreams[0] = 0;
    shot_ext->shot.ctl.request.outputStreams[1] = 0;
    shot_ext->shot.ctl.request.outputStreams[2] = 0;

    // Delta-suppression: 0 means "unchanged" to the ISP.
    if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
        shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
        m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
    }
    if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
        shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
        m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
    }
    if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
    }
    else {
        shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
        m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
    }
    if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
        shot_ext->shot.ctl.aa.aeExpCompensation = 0;
    }
    else {
        shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
        m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
    }
    if (afTrigger) {
        ALOGE("### AF Trigger ");
        shot_ext->shot.ctl.aa.afTrigger = 1;
        // zeroed AF regions: whole-frame (default) AF window
        shot_ext->shot.ctl.aa.afRegions[0] = 0;
        shot_ext->shot.ctl.aa.afRegions[1] = 0;
        shot_ext->shot.ctl.aa.afRegions[2] = 0;
        shot_ext->shot.ctl.aa.afRegions[3] = 0;
        shot_ext->shot.ctl.aa.afRegions[4] = 0;
    }
    else
        shot_ext->shot.ctl.aa.afTrigger = 0;
    // Output stream mapping: 0 = preview (scalerP), 1 = capture (scalerC),
    // 2 = record (scalerP, pinned to 30 fps).
    for (int i = 0; i < newEntry->output_stream_count; i++) {
       targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];

        if (targetStreamIndex==0) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
            shot_ext->request_scp = 1;
            if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
                shot_ext->fd_bypass = 0;
        }
        else if (targetStreamIndex == 1) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
            shot_ext->request_scc = 1;
            if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
                shot_ext->fd_bypass = 0;
        }
        else if (targetStreamIndex == 2) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (record)", __FUNCTION__, i);
            shot_ext->request_scp = 1;
            shot_ext->shot.ctl.request.outputStreams[2] = 1;
            shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
            shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
            if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
                shot_ext->fd_bypass = 0;
        }
        else {
            ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
        }
    }
        ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
        (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
        (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
        (int)(shot_ext->shot.ctl.aa.afMode));
}
692
693int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
694{
695    for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
696        if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
697            return i;
698    }
699    return -1;
700}
701
702void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
703{
704    int index = FindEntryIndexByFrameCnt(frameCnt);
705    if (index == -1) {
706        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
707        return;
708    }
709
710    request_manager_entry * currentEntry = &(entries[index]);
711    currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
712    ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
713        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
714}
715
716uint64_t  RequestManager::GetTimestamp(int index)
717{
718    if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
719        ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
720        return 0;
721    }
722
723    request_manager_entry * currentEntry = &(entries[index]);
724    uint64_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
725    ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
726    return frameTime;
727}
728
729int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
730{
731    int i;
732
733    if (m_numOfEntries == 0) {
734        ALOGV("(%s): No Entry found", __FUNCTION__);
735        return -1;
736    }
737
738    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
739        if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
740            continue;
741
742        if (entries[i].status == REQUESTED) {
743            entries[i].status = CAPTURED;
744            return entries[i].internal_shot.shot.ctl.request.frameCount;
745        }
746
747    }
748
749    ALOGD("(%s): No Entry found", __FUNCTION__);
750
751    return -1;
752}
753
754void     RequestManager::SetInitialSkip(int count)
755{
756    ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
757    if (count > m_sensorPipelineSkipCnt)
758        m_sensorPipelineSkipCnt = count;
759}
760
761int     RequestManager::GetSkipCnt()
762{
763    ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
764    if (m_sensorPipelineSkipCnt == 0)
765        return m_sensorPipelineSkipCnt;
766    else
767        return --m_sensorPipelineSkipCnt;
768}
769
770void RequestManager::Dump(void)
771{
772    int i = 0;
773    request_manager_entry * currentEntry;
774    ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
775    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
776
777    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
778        currentEntry =  &(entries[i]);
779        ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%d outstream[1]-%d", i,
780        currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
781            currentEntry->output_stream_count,
782            currentEntry->internal_shot.shot.ctl.request.outputStreams[0],
783            currentEntry->internal_shot.shot.ctl.request.outputStreams[1]);
784    }
785}
786
787int     RequestManager::GetNextIndex(int index)
788{
789    index++;
790    if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
791        index = 0;
792
793    return index;
794}
795
// Constructs the HAL for one camera: loads the gralloc module (once,
// shared via the static m_grallocHal), creates an ion client, brings up
// the ISP chain and the main/sensor threads, allocates the request
// manager, and initializes the two hardware CSC (color space conversion)
// contexts for picture and video paths.
ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera):
            m_requestQueueOps(NULL),
            m_frameQueueOps(NULL),
            m_callbackCookie(NULL),
            m_numOfRemainingReqInSvc(0),
            m_isRequestQueuePending(false),
            m_isRequestQueueNull(true),
            m_isSensorThreadOn(false),
            m_isSensorStarted(false),
            m_isIspStarted(false),
            m_ionCameraClient(0),
            m_initFlag1(false),
            m_initFlag2(false),
            m_scp_flushing(false),
            m_closing(false),
            m_recordingEnabled(false),
            m_needsRecordBufferInit(false),
            lastFrameCnt(-1),
            m_scp_closing(false),
            m_scp_closed(false),
            m_afState(HAL_AFSTATE_INACTIVE),
            m_afMode(NO_CHANGE),
            m_afMode2(NO_CHANGE),
            m_IsAfModeUpdateRequired(false),
            m_IsAfTriggerRequired(false),
            m_IsAfLockRequired(false),
            m_wideAspect(false),
            m_afTriggerId(0),
            m_halDevice(dev),
            m_need_streamoff(0),
            m_cameraId(cameraId)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    int ret = 0;

    m_exynosPictureCSC = NULL;
    m_exynosVideoCSC = NULL;

    // gralloc HAL is process-wide; only load it on the first instance
    if (!m_grallocHal) {
        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
        if (ret)
            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
    }

    m_camera2 = camera;
    m_ionCameraClient = createIonClient(m_ionCameraClient);
    if(m_ionCameraClient == 0)
        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);


    m_BayerManager = new BayerBufManager();
    m_mainThread    = new MainThread(this);
    // ISP chain must be initialized before the sensor thread starts
    InitializeISPChain();
    m_sensorThread  = new SensorThread(this);
    m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
    ALOGV("DEBUG(%s): created sensorthread ################", __FUNCTION__);

    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
    // dedicated HW CSC contexts, each pinned to its own GSC node
    CSC_METHOD cscMethod = CSC_METHOD_HW;
    m_exynosPictureCSC = csc_init(cscMethod);
    if (m_exynosPictureCSC == NULL)
        ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
    csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);

    m_exynosVideoCSC = csc_init(cscMethod);
    if (m_exynosVideoCSC == NULL)
        ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
    csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);


    ALOGV("DEBUG(%s): END", __FUNCTION__);
    m_setExifFixedAttribute();
}
869
870ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
871{
872    ALOGV("%s: ENTER", __FUNCTION__);
873    this->release();
874    ALOGV("%s: EXIT", __FUNCTION__);
875}
876
877void ExynosCameraHWInterface2::release()
878{
879    int i, res;
880    ALOGD("%s: ENTER", __func__);
881    m_closing = true;
882
883    if (m_streamThreads[1] != NULL) {
884        m_streamThreads[1]->release();
885        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
886    }
887
888    if (m_streamThreads[0] != NULL) {
889        m_streamThreads[0]->release();
890        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
891    }
892
893    if (m_ispThread != NULL) {
894        m_ispThread->release();
895    }
896
897    if (m_sensorThread != NULL) {
898        m_sensorThread->release();
899    }
900
901    if (m_mainThread != NULL) {
902        m_mainThread->release();
903    }
904
905    if (m_exynosPictureCSC)
906        csc_deinit(m_exynosPictureCSC);
907    m_exynosPictureCSC = NULL;
908
909    if (m_exynosVideoCSC)
910        csc_deinit(m_exynosVideoCSC);
911    m_exynosVideoCSC = NULL;
912
913
914    if (m_streamThreads[1] != NULL) {
915        while (!m_streamThreads[1]->IsTerminated())
916        {
917            ALOGD("Waiting for ISP thread is tetminated");
918            usleep(100000);
919        }
920        m_streamThreads[1] = NULL;
921    }
922
923    if (m_streamThreads[0] != NULL) {
924        while (!m_streamThreads[0]->IsTerminated())
925        {
926            ALOGD("Waiting for sensor thread is tetminated");
927            usleep(100000);
928        }
929        m_streamThreads[0] = NULL;
930    }
931
932    if (m_ispThread != NULL) {
933        while (!m_ispThread->IsTerminated())
934        {
935            ALOGD("Waiting for isp thread is tetminated");
936            usleep(100000);
937        }
938        m_ispThread = NULL;
939    }
940
941    if (m_sensorThread != NULL) {
942        while (!m_sensorThread->IsTerminated())
943        {
944            ALOGD("Waiting for sensor thread is tetminated");
945            usleep(100000);
946        }
947        m_sensorThread = NULL;
948    }
949
950    if (m_mainThread != NULL) {
951        while (!m_mainThread->IsTerminated())
952        {
953            ALOGD("Waiting for main thread is tetminated");
954            usleep(100000);
955        }
956        m_mainThread = NULL;
957    }
958
959    if (m_requestManager != NULL) {
960        delete m_requestManager;
961        m_requestManager = NULL;
962    }
963
964    if (m_BayerManager != NULL) {
965        delete m_BayerManager;
966        m_BayerManager = NULL;
967    }
968//    for(i = 0; i < m_camera_info.sensor.buffers; i++)
969    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
970        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
971
972    for(i = 0; i < m_camera_info.capture.buffers; i++)
973        freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
974
975    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
976    res = exynos_v4l2_close(m_camera_info.sensor.fd);
977    if (res != NO_ERROR ) {
978        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
979    }
980
981    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
982    res = exynos_v4l2_close(m_camera_info.isp.fd);
983    if (res != NO_ERROR ) {
984        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
985    }
986
987    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
988    res = exynos_v4l2_close(m_camera_info.capture.fd);
989    if (res != NO_ERROR ) {
990        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
991    }
992
993    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
994    res = exynos_v4l2_close(m_fd_scp);
995    if (res != NO_ERROR ) {
996        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
997    }
998    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
999    deleteIonClient(m_ionCameraClient);
1000
1001    ALOGV("%s: EXIT", __func__);
1002}
1003
1004void ExynosCameraHWInterface2::InitializeISPChain()
1005{
1006    char node_name[30];
1007    int fd = 0;
1008    int i;
1009
1010    /* Open Sensor */
1011    memset(&node_name, 0x00, sizeof(char[30]));
1012    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
1013    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1014
1015    if (fd < 0) {
1016        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1017    }
1018    else {
1019        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1020    }
1021    m_camera_info.sensor.fd = fd;
1022
1023    /* Open ISP */
1024    memset(&node_name, 0x00, sizeof(char[30]));
1025    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
1026    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1027
1028    if (fd < 0) {
1029        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1030    }
1031    else {
1032        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1033    }
1034    m_camera_info.isp.fd = fd;
1035
1036    /* Open ScalerC */
1037    memset(&node_name, 0x00, sizeof(char[30]));
1038    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
1039    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1040
1041    if (fd < 0) {
1042        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1043    }
1044    else {
1045        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1046    }
1047    m_camera_info.capture.fd = fd;
1048
1049    /* Open ScalerP */
1050    memset(&node_name, 0x00, sizeof(char[30]));
1051    sprintf(node_name, "%s%d", NODE_PREFIX, 44);
1052    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1053    if (fd < 0) {
1054        ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1055    }
1056    else {
1057        ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1058    }
1059    m_fd_scp = fd;
1060
1061    if(m_cameraId == 0)
1062        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
1063    else
1064        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
1065
1066    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
1067    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
1068    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
1069
1070    m_camera_info.dummy_shot.dis_bypass = 1;
1071    m_camera_info.dummy_shot.dnr_bypass = 1;
1072    m_camera_info.dummy_shot.fd_bypass = 1;
1073
1074    /*sensor setting*/
1075    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
1076    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
1077    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
1078
1079    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
1080    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
1081
1082    /*request setting*/
1083    m_camera_info.dummy_shot.request_sensor = 1;
1084    m_camera_info.dummy_shot.request_scc = 0;
1085    m_camera_info.dummy_shot.request_scp = 0;
1086    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
1087    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[1] = 0;
1088    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[2] = 0;
1089
1090    m_camera_info.sensor.width = m_camera2->getSensorRawW();
1091    m_camera_info.sensor.height = m_camera2->getSensorRawH();
1092
1093    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
1094    m_camera_info.sensor.planes = 2;
1095    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1096    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1097    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
1098    m_camera_info.sensor.ionClient = m_ionCameraClient;
1099
1100    for(i = 0; i < m_camera_info.sensor.buffers; i++){
1101        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1102        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
1103        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
1104        allocCameraMemory(m_camera_info.sensor.ionClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1105    }
1106
1107    m_camera_info.isp.width = m_camera_info.sensor.width;
1108    m_camera_info.isp.height = m_camera_info.sensor.height;
1109    m_camera_info.isp.format = m_camera_info.sensor.format;
1110    m_camera_info.isp.planes = m_camera_info.sensor.planes;
1111    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1112    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1113    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
1114    m_camera_info.isp.ionClient = m_ionCameraClient;
1115
1116    for(i = 0; i < m_camera_info.isp.buffers; i++){
1117        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
1118        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
1119        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
1120        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
1121        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
1122        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
1123        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
1124    };
1125
1126    /* init ISP */
1127    cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
1128    cam_int_s_fmt(&(m_camera_info.isp));
1129    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
1130    cam_int_reqbufs(&(m_camera_info.isp));
1131    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
1132    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
1133
1134    /* init Sensor */
1135    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
1136    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
1137    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
1138        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
1139    }
1140    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
1141    cam_int_reqbufs(&(m_camera_info.sensor));
1142    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
1143    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1144        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1145        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1146                sizeof(struct camera2_shot_ext));
1147        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1148        m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1149        cam_int_qbuf(&(m_camera_info.sensor), i);
1150    }
1151    ALOGV("== stream_on :: .sensor");
1152    cam_int_streamon(&(m_camera_info.sensor));
1153
1154    /* init Capture */
1155    m_camera_info.capture.width = m_camera2->getSensorW();
1156    m_camera_info.capture.height = m_camera2->getSensorH();
1157    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
1158    m_camera_info.capture.planes = 1;
1159    m_camera_info.capture.buffers = 8;
1160    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1161    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
1162    m_camera_info.capture.ionClient = m_ionCameraClient;
1163
1164    for(i = 0; i < m_camera_info.capture.buffers; i++){
1165        initCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1166        m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
1167        allocCameraMemory(m_camera_info.capture.ionClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1168    }
1169
1170    cam_int_s_input(&(m_camera_info.capture), m_camera_info.sensor_id);
1171    cam_int_s_fmt(&(m_camera_info.capture));
1172    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1173    cam_int_reqbufs(&(m_camera_info.capture));
1174    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1175
1176    for (i = 0; i < m_camera_info.capture.buffers; i++) {
1177        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1178        cam_int_qbuf(&(m_camera_info.capture), i);
1179    }
1180
1181    ALOGV("== stream_on :: capture");
1182    if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1183        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1184    } else {
1185        m_camera_info.capture.status = true;
1186    }
1187}
1188
// Starts streaming on the ISP output node, then issues the
// V4L2_CID_IS_S_STREAM control on the sensor node to enable the IS stream.
void ExynosCameraHWInterface2::StartISP()
{
    ALOGV("== stream_on :: isp");
    cam_int_streamon(&(m_camera_info.isp));
    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
}
1195
// Returns the camera id this HAL instance was opened with (0 selects the
// S5K4E5 sensor, 1 the S5K6A3 — see InitializeISPChain()).
int ExynosCameraHWInterface2::getCameraId() const
{
    return m_cameraId;
}
1200
1201int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
1202{
1203    ALOGV("DEBUG(%s):", __FUNCTION__);
1204    if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
1205            && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
1206        m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
1207        return 0;
1208    }
1209    else {
1210        ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
1211        return 1;
1212    }
1213}
1214
1215int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
1216{
1217    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
1218    if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
1219        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
1220        return 0;
1221    }
1222    m_isRequestQueueNull = false;
1223    if (m_requestManager->GetNumEntries() == 0)
1224        m_requestManager->SetInitialSkip(5);
1225    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1226    return 0;
1227}
1228
1229int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
1230{
1231    ALOGV("DEBUG(%s):", __FUNCTION__);
1232    if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
1233            && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
1234        m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
1235        return 0;
1236    }
1237    else {
1238        ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
1239        return 1;
1240    }
1241}
1242
1243int ExynosCameraHWInterface2::getInProgressCount()
1244{
1245    int inProgressCount = m_requestManager->GetNumEntries();
1246    ALOGV("DEBUG(%s): # of dequeued req (%d)", __FUNCTION__, inProgressCount);
1247    return inProgressCount;
1248}
1249
// Stub: this HAL does not cancel in-flight captures; always reports success.
int ExynosCameraHWInterface2::flushCapturesInProgress()
{
    return 0;
}
1254
1255int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
1256{
1257    ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
1258
1259    if (request == NULL) return BAD_VALUE;
1260    if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
1261        return BAD_VALUE;
1262    }
1263    status_t res;
1264    // Pass 1, calculate size and allocate
1265    res = m_camera2->constructDefaultRequest(request_template,
1266            request,
1267            true);
1268    if (res != OK) {
1269        return res;
1270    }
1271    // Pass 2, build request
1272    res = m_camera2->constructDefaultRequest(request_template,
1273            request,
1274            false);
1275    if (res != OK) {
1276        ALOGE("Unable to populate new request for template %d",
1277                request_template);
1278    }
1279
1280    return res;
1281}
1282
// Allocates (or re-uses) an output stream for the service.
// - OPAQUE format at a supported resolution -> stream 0 (direct preview via
//   the SCP node), or stream 2 (recording piggy-backed on an already-active
//   stream 0).
// - BLOB format at a supported JPEG resolution -> stream 1 (indirect path
//   fed from the capture node).
// Outputs stream_id, the actual pixel format, gralloc usage flags and the
// max service-buffer count. Returns 0 on success, 1 on failure.
int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
                                    uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s): allocate stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
    char node_name[30];
    int fd = 0, allocCase = 0;
    StreamThread *AllocatedStream;
    stream_parameters_t newParameters;

    if (format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE &&
        m_camera2->isSupportedResolution(width, height)) {
        // allocCase: 0 = stream 0 does not exist yet, 1 = stream 0 active
        // (this request becomes the recording stream), 2 = stream 0 exists
        // but is deactivated (re-use it).
        if (!(m_streamThreads[0].get())) {
            ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
            allocCase = 0;
        }
        else {
            if ((m_streamThreads[0].get())->m_activated == true) {
                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
                allocCase = 1;
            }
            else {
                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
                allocCase = 2;
            }
        }
        // 16:9 resolutions require the wide-aspect sensor crop.
        if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)) {
            m_wideAspect = true;
        }
        else {
            m_wideAspect = false;
        }
        ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);

        if (allocCase == 0 || allocCase == 2) {
            // Preview path: stream 0, direct to the SCP node, 3-plane YV12.
            *stream_id = 0;

            if (allocCase == 0) {
                m_streamThreads[0]  = new StreamThread(this, *stream_id);
             }
            AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
            m_scp_flushing = false;
            m_scp_closing = false;
            m_scp_closed = false;
            usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()

            *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
            *max_buffers = 8;

            newParameters.streamType    = STREAM_TYPE_DIRECT;
            newParameters.outputWidth   = width;
            newParameters.outputHeight  = height;
            newParameters.nodeWidth     = width;
            newParameters.nodeHeight    = height;
            newParameters.outputFormat  = *format_actual;
            newParameters.nodeFormat    = HAL_PIXEL_FORMAT_2_V4L2_PIX(*format_actual);
            newParameters.streamOps     = stream_ops;
            newParameters.usage         = *usage;
            newParameters.numHwBuffers  = 8;
            newParameters.numOwnSvcBuffers = *max_buffers;
            newParameters.fd            = m_fd_scp;
            newParameters.nodePlanes    = 3;
            newParameters.svcPlanes     = 3;
            newParameters.halBuftype    = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
            newParameters.memory        = V4L2_MEMORY_DMABUF;
            newParameters.ionClient     = m_ionCameraClient;
            newParameters.numSvcBufsInHal  = 0;
            AllocatedStream->m_index = *stream_id;
            AllocatedStream->setParameter(&newParameters);
            AllocatedStream->m_activated = true;

            m_scp_flushing = false;
            m_scp_closing = false;
            m_scp_closed = false;
            m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
            m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
            return 0;
        }
        else if (allocCase == 1) {
            // Recording path: stream 2, attached to the existing stream 0
            // thread as NV12M recording parameters.
            record_parameters_t recordParameters;
            StreamThread *parentStream;
            parentStream = (StreamThread*)(m_streamThreads[0].get());
            if (!parentStream) {
                return 1;
                // TODO
            }
            *stream_id = 2;
            usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()

            *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
            *max_buffers = 6;

            recordParameters.outputWidth   = width;
            recordParameters.outputHeight  = height;
            recordParameters.outputFormat     = *format_actual;
            recordParameters.svcPlanes        = NUM_PLANES(*format_actual);
            recordParameters.streamOps     = stream_ops;
            recordParameters.usage         = *usage;
            recordParameters.numOwnSvcBuffers = *max_buffers;
            recordParameters.numSvcBufsInHal  = 0;

            parentStream->setRecordingParameter(&recordParameters);
            m_scp_flushing = false;
            m_scp_closing = false;
            m_scp_closed = false;
            m_recordingEnabled = true;
            return 0;
        }
    }
    else if (format == HAL_PIXEL_FORMAT_BLOB
            && m_camera2->isSupportedJpegResolution(width, height)) {
        // JPEG path: stream 1, indirect, fed from the capture (ScalerC) node.
        *stream_id = 1;

        if (!(m_streamThreads[*stream_id].get())) {
            // NOTE(review): log text says "stream 0" but this branch handles
            // stream 1 — looks like a copy-paste; message only, no behavior.
            ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
            m_streamThreads[1]  = new StreamThread(this, *stream_id);
            allocCase = 0;
        }
        else {
            if ((m_streamThreads[*stream_id].get())->m_activated == true) {
                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
                allocCase = 1;
            }
            else {
                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
                allocCase = 2;
            }
        }

        AllocatedStream = (StreamThread*)(m_streamThreads[*stream_id].get());

        fd = m_camera_info.capture.fd;
        usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()

        *format_actual = HAL_PIXEL_FORMAT_BLOB;

        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
        *max_buffers = 4;

        newParameters.streamType    = STREAM_TYPE_INDIRECT;
        newParameters.outputWidth   = width;
        newParameters.outputHeight  = height;

        // Node operates at full sensor resolution; JPEG is produced from it.
        newParameters.nodeWidth     = m_camera2->getSensorW();
        newParameters.nodeHeight    = m_camera2->getSensorH();

        newParameters.outputFormat  = *format_actual;
        newParameters.nodeFormat    = V4L2_PIX_FMT_YUYV;
        newParameters.streamOps     = stream_ops;
        newParameters.usage         = *usage;
        newParameters.numHwBuffers  = 8;
        newParameters.numOwnSvcBuffers = *max_buffers;
        newParameters.fd            = fd;
        newParameters.nodePlanes    = 1;
        newParameters.svcPlanes     = 1;
        newParameters.halBuftype    = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        newParameters.memory        = V4L2_MEMORY_DMABUF;
        newParameters.ionClient     = m_ionCameraClient;
        newParameters.numSvcBufsInHal  = 0;
        AllocatedStream->m_index = *stream_id;
        AllocatedStream->setParameter(&newParameters);
        return 0;
    }
    ALOGE("DEBUG(%s): Unsupported Pixel Format", __FUNCTION__);
    return 1; // TODO : check proper error code
}
1451
1452int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
1453        int num_buffers, buffer_handle_t *registeringBuffers)
1454{
1455    int                     i,j;
1456    void                    *virtAddr[3];
1457    uint32_t                plane_index = 0;
1458    stream_parameters_t     *targetStreamParms;
1459    record_parameters_t     *targetRecordParms;
1460    node_info_t             *currentNode;
1461
1462    struct v4l2_buffer v4l2_buf;
1463    struct v4l2_plane  planes[VIDEO_MAX_PLANES];
1464
1465    ALOGV("DEBUG(%s): streamID (%d), num_buff(%d), handle(%x) ", __FUNCTION__,
1466        stream_id, num_buffers, (uint32_t)registeringBuffers);
1467
1468    if (stream_id == 0) {
1469        targetStreamParms = &(m_streamThreads[0]->m_parameters);
1470    }
1471    else if (stream_id == 1) {
1472        targetStreamParms = &(m_streamThreads[1]->m_parameters);
1473        // TODO : make clear stream off case
1474        m_need_streamoff = 0;
1475
1476        if (m_camera_info.capture.status == false) {
1477            /* capture */
1478            m_camera_info.capture.buffers = 8;
1479            cam_int_s_fmt(&(m_camera_info.capture));
1480            cam_int_reqbufs(&(m_camera_info.capture));
1481            for (i = 0; i < m_camera_info.capture.buffers; i++) {
1482                ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1483                cam_int_qbuf(&(m_camera_info.capture), i);
1484            }
1485
1486            if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1487                ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1488            } else {
1489                m_camera_info.capture.status = true;
1490            }
1491        }
1492    }
1493    else if (stream_id == 2) {
1494        m_need_streamoff = 0;
1495        targetRecordParms = &(m_streamThreads[0]->m_recordParameters);
1496
1497        targetRecordParms->numSvcBuffers = num_buffers;
1498
1499        for (i = 0 ; i<targetRecordParms->numSvcBuffers ; i++) {
1500            ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
1501                i, (uint32_t)(registeringBuffers[i]));
1502            if (m_grallocHal) {
1503                if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
1504                       targetRecordParms->usage, 0, 0,
1505                       targetRecordParms->outputWidth, targetRecordParms->outputHeight, virtAddr) != 0) {
1506                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
1507                }
1508                else {
1509                    ExynosBuffer currentBuf;
1510                    const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1511                    currentBuf.fd.extFd[0] = priv_handle->fd;
1512                    currentBuf.fd.extFd[1] = priv_handle->fd1;
1513                    currentBuf.fd.extFd[2] = priv_handle->fd2;
1514                    for (plane_index=0 ; plane_index < targetRecordParms->svcPlanes ; plane_index++) {
1515                        currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
1516                        ALOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x)",
1517                             __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
1518                             (unsigned int)currentBuf.virt.extP[plane_index]);
1519                    }
1520                    targetRecordParms->svcBufStatus[i]  = ON_SERVICE;
1521                    targetRecordParms->svcBuffers[i]    = currentBuf;
1522                    targetRecordParms->svcBufHandle[i]  = registeringBuffers[i];
1523                }
1524            }
1525        }
1526        m_needsRecordBufferInit = true;
1527        return 0;
1528    }
1529    else {
1530        ALOGE("ERR(%s) unregisterd stream id (%d)", __FUNCTION__, stream_id);
1531        return 1;
1532    }
1533
1534    if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
1535        if (num_buffers < targetStreamParms->numHwBuffers) {
1536            ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
1537                __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
1538            return 1;
1539        }
1540    }
1541    ALOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)",
1542            __FUNCTION__, targetStreamParms->outputFormat, targetStreamParms->outputWidth,
1543            targetStreamParms->outputHeight, targetStreamParms->svcPlanes);
1544
1545    targetStreamParms->numSvcBuffers = num_buffers;
1546    currentNode = &(targetStreamParms->node); // TO Remove
1547
1548    currentNode->fd         = targetStreamParms->fd;
1549    currentNode->width      = targetStreamParms->nodeWidth;
1550    currentNode->height     = targetStreamParms->nodeHeight;
1551    currentNode->format     = targetStreamParms->nodeFormat;
1552    currentNode->planes     = targetStreamParms->nodePlanes;
1553    currentNode->buffers    = targetStreamParms->numHwBuffers;
1554    currentNode->type       = targetStreamParms->halBuftype;
1555    currentNode->memory     = targetStreamParms->memory;
1556    currentNode->ionClient  = targetStreamParms->ionClient;
1557
1558    if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
1559        if(m_need_streamoff == 1) {
1560            if (m_sensorThread != NULL) {
1561                m_sensorThread->release();
1562                /* TODO */
1563                usleep(500000);
1564            } else {
1565                ALOGE("+++++++ sensor thread is NULL %d", __LINE__);
1566            }
1567
1568            ALOGV("(%s): calling capture streamoff", __FUNCTION__);
1569            if (cam_int_streamoff(&(m_camera_info.capture)) < 0) {
1570                ALOGE("ERR(%s): capture stream off fail", __FUNCTION__);
1571            } else {
1572                m_camera_info.capture.status = false;
1573            }
1574
1575            ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
1576
1577            m_camera_info.capture.buffers = 0;
1578            ALOGV("DEBUG(%s): capture calling reqbuf 0 ", __FUNCTION__);
1579            cam_int_reqbufs(&(m_camera_info.capture));
1580            ALOGV("DEBUG(%s): capture calling reqbuf 0 done", __FUNCTION__);
1581
1582            m_isIspStarted = false;
1583        }
1584
1585        if (m_need_streamoff == 1) {
1586            m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1587            m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1588            m_camera_info.capture.buffers = 8;
1589            /* isp */
1590            cam_int_s_fmt(&(m_camera_info.isp));
1591            cam_int_reqbufs(&(m_camera_info.isp));
1592            /* sensor */
1593            cam_int_s_fmt(&(m_camera_info.sensor));
1594            cam_int_reqbufs(&(m_camera_info.sensor));
1595
1596            for (i = 0; i < 8; i++) {
1597                ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
1598                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1599                        sizeof(struct camera2_shot_ext));
1600                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1601                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1602                cam_int_qbuf(&(m_camera_info.sensor), i);
1603            }
1604
1605            /* capture */
1606            cam_int_s_fmt(&(m_camera_info.capture));
1607            cam_int_reqbufs(&(m_camera_info.capture));
1608            for (i = 0; i < m_camera_info.capture.buffers; i++) {
1609                ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
1610                cam_int_qbuf(&(m_camera_info.capture), i);
1611            }
1612
1613       }
1614
1615        cam_int_s_input(currentNode, m_camera_info.sensor_id);
1616        cam_int_s_fmt(currentNode);
1617        cam_int_reqbufs(currentNode);
1618
1619    }
1620    else if (targetStreamParms->streamType == STREAM_TYPE_INDIRECT) {
1621        for(i = 0; i < currentNode->buffers; i++){
1622            memcpy(&(currentNode->buffer[i]), &(m_camera_info.capture.buffer[i]), sizeof(ExynosBuffer));
1623        }
1624    }
1625
1626    for (i = 0 ; i<targetStreamParms->numSvcBuffers ; i++) {
1627        ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
1628            i, (uint32_t)(registeringBuffers[i]));
1629        if (m_grallocHal) {
1630            if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
1631                   targetStreamParms->usage, 0, 0,
1632                   currentNode->width, currentNode->height, virtAddr) != 0) {
1633                ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
1634            }
1635            else {
1636                v4l2_buf.m.planes   = planes;
1637                v4l2_buf.type       = currentNode->type;
1638                v4l2_buf.memory     = currentNode->memory;
1639                v4l2_buf.index      = i;
1640                v4l2_buf.length     = currentNode->planes;
1641
1642                ExynosBuffer currentBuf;
1643                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1644
1645                m_getAlignedYUVSize(currentNode->format,
1646                    currentNode->width, currentNode->height, &currentBuf);
1647
1648                v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
1649                v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
1650                v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
1651                currentBuf.fd.extFd[0] = priv_handle->fd;
1652                currentBuf.fd.extFd[2] = priv_handle->fd1;
1653                currentBuf.fd.extFd[1] = priv_handle->fd2;
1654                ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
1655                if (currentNode->planes == 1) {
1656                    currentBuf.size.extS[0] = priv_handle->size;
1657                    currentBuf.size.extS[1] = 0;
1658                    currentBuf.size.extS[2] = 0;
1659                }
1660                for (plane_index = 0 ; plane_index < v4l2_buf.length ; plane_index++) {
1661                    currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
1662                    v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
1663                    ALOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x), length(%d)",
1664                         __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
1665                         (unsigned int)currentBuf.virt.extP[plane_index],
1666                         v4l2_buf.m.planes[plane_index].length);
1667                }
1668
1669                if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
1670                    if (i < currentNode->buffers) {
1671                        if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
1672                            ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
1673                                __FUNCTION__, stream_id, currentNode->fd);
1674                            //return false;
1675                        }
1676                        ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
1677                                __FUNCTION__, stream_id, currentNode->fd);
1678                        targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
1679                    }
1680                    else {
1681                        targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1682                    }
1683                }
1684                else if (targetStreamParms->streamType == STREAM_TYPE_INDIRECT) {
1685                    targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
1686                }
1687                targetStreamParms->svcBuffers[i]       = currentBuf;
1688                targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
1689            }
1690        }
1691    }
1692
1693    ALOGV("DEBUG(%s): calling  streamon", __FUNCTION__);
1694    if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
1695        ALOGD("%s(%d), stream id = %d", __FUNCTION__, __LINE__, stream_id);
1696        cam_int_streamon(&(targetStreamParms->node));
1697    }
1698
1699    if (m_need_streamoff == 1) {
1700        if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1701            ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1702        } else {
1703            m_camera_info.capture.status = true;
1704        }
1705
1706        cam_int_streamon(&(m_camera_info.sensor));
1707    }
1708
1709    ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
1710    ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
1711
1712    if(!m_isIspStarted) {
1713        m_isIspStarted = true;
1714        StartISP();
1715    }
1716
1717    if (m_need_streamoff == 1) {
1718        m_requestManager->SetInitialSkip(8);
1719        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1720        m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1721    }
1722    m_need_streamoff = 1;
1723
1724    return 0;
1725}
1726
1727int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
1728{
1729    StreamThread *targetStream;
1730    ALOGV("DEBUG(%s):", __FUNCTION__);
1731
1732    if (stream_id == 0) {
1733        targetStream = (StreamThread*)(m_streamThreads[0].get());
1734        m_scp_flushing = true;
1735    }
1736    else if (stream_id == 1) {
1737        targetStream = (StreamThread*)(m_streamThreads[1].get());
1738    }
1739    else if (stream_id == 2 && m_recordingEnabled) {
1740        m_recordingEnabled = false;
1741        m_needsRecordBufferInit = true;
1742        return 0;
1743    }
1744    else {
1745        ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
1746        return 1;
1747    }
1748
1749    targetStream->m_releasing = true;
1750    do {
1751        ALOGD("stream thread release %d", __LINE__);
1752        targetStream->release();
1753        usleep(33000);
1754    } while (targetStream->m_releasing);
1755    targetStream->m_activated = false;
1756    ALOGV("DEBUG(%s): DONE", __FUNCTION__);
1757    return 0;
1758}
1759
/*
 * Reprocess-stream allocation entry point required by the camera2 HAL API.
 * Not implemented: all parameters are ignored and 0 is returned.
 */
int ExynosCameraHWInterface2::allocateReprocessStream(
    uint32_t width, uint32_t height, uint32_t format,
    const camera2_stream_in_ops_t *reprocess_stream_ops,
    uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
1768
/*
 * Reprocess-stream release entry point required by the camera2 HAL API.
 * Not implemented: stream_id is ignored and 0 is returned.
 */
int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
1774
1775int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
1776{
1777    ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
1778
1779    switch (trigger_id) {
1780    case CAMERA2_TRIGGER_AUTOFOCUS:
1781        ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
1782        OnAfTrigger(ext1);
1783        break;
1784
1785    case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
1786        ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
1787        OnAfCancel(ext1);
1788        break;
1789    default:
1790        break;
1791    }
1792    return 0;
1793}
1794
1795int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
1796{
1797    ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
1798    m_notifyCb = notify_cb;
1799    m_callbackCookie = user;
1800    return 0;
1801}
1802
/*
 * Vendor-tag query-ops entry point required by the camera2 HAL API.
 * Not implemented: *ops is left untouched and 0 is returned.
 */
int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
1808
/*
 * Debug-dump entry point required by the camera2 HAL API.
 * Not implemented: nothing is written to fd and 0 is returned.
 */
int ExynosCameraHWInterface2::dump(int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
1814
1815void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
1816{
1817    switch (colorFormat) {
1818    // 1p
1819    case V4L2_PIX_FMT_RGB565 :
1820    case V4L2_PIX_FMT_YUYV :
1821    case V4L2_PIX_FMT_UYVY :
1822    case V4L2_PIX_FMT_VYUY :
1823    case V4L2_PIX_FMT_YVYU :
1824        buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
1825        buf->size.extS[1] = 0;
1826        buf->size.extS[2] = 0;
1827        break;
1828    // 2p
1829    case V4L2_PIX_FMT_NV12 :
1830    case V4L2_PIX_FMT_NV12T :
1831    case V4L2_PIX_FMT_NV21 :
1832        buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
1833        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
1834        buf->size.extS[2] = 0;
1835        break;
1836    case V4L2_PIX_FMT_NV12M :
1837    case V4L2_PIX_FMT_NV12MT_16X16 :
1838    case V4L2_PIX_FMT_NV21M:
1839        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
1840        buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
1841        buf->size.extS[2] = 0;
1842        break;
1843    case V4L2_PIX_FMT_NV16 :
1844    case V4L2_PIX_FMT_NV61 :
1845        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
1846        buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
1847        buf->size.extS[2] = 0;
1848        break;
1849     // 3p
1850    case V4L2_PIX_FMT_YUV420 :
1851    case V4L2_PIX_FMT_YVU420 :
1852        buf->size.extS[0] = (w * h);
1853        buf->size.extS[1] = (w * h) >> 2;
1854        buf->size.extS[2] = (w * h) >> 2;
1855        break;
1856    case V4L2_PIX_FMT_YUV420M:
1857    case V4L2_PIX_FMT_YVU420M :
1858    case V4L2_PIX_FMT_YUV422P :
1859        buf->size.extS[0] = ALIGN(w,  32) * ALIGN(h,  16);
1860        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
1861        buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
1862        break;
1863    default:
1864        ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
1865        return;
1866        break;
1867    }
1868}
1869
1870bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
1871                                             int  dst_w,  int   dst_h,
1872                                             int *crop_x, int *crop_y,
1873                                             int *crop_w, int *crop_h,
1874                                             int zoom)
1875{
1876    *crop_w = src_w;
1877    *crop_h = src_h;
1878
1879    if (   src_w != dst_w
1880        || src_h != dst_h) {
1881        float src_ratio = 1.0f;
1882        float dst_ratio = 1.0f;
1883
1884        // ex : 1024 / 768
1885        src_ratio = (float)src_w / (float)src_h;
1886
1887        // ex : 352  / 288
1888        dst_ratio = (float)dst_w / (float)dst_h;
1889
1890        if (dst_w * dst_h < src_w * src_h) {
1891            if (dst_ratio <= src_ratio) {
1892                // shrink w
1893                *crop_w = src_h * dst_ratio;
1894                *crop_h = src_h;
1895            } else {
1896                // shrink h
1897                *crop_w = src_w;
1898                *crop_h = src_w / dst_ratio;
1899            }
1900        } else {
1901            if (dst_ratio <= src_ratio) {
1902                // shrink w
1903                *crop_w = src_h * dst_ratio;
1904                *crop_h = src_h;
1905            } else {
1906                // shrink h
1907                *crop_w = src_w;
1908                *crop_h = src_w / dst_ratio;
1909            }
1910        }
1911    }
1912
1913    if (zoom != 0) {
1914        float zoomLevel = ((float)zoom + 10.0) / 10.0;
1915        *crop_w = (int)((float)*crop_w / zoomLevel);
1916        *crop_h = (int)((float)*crop_h / zoomLevel);
1917    }
1918
1919    #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
1920    unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
1921    if (w_align != 0) {
1922        if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
1923            && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
1924            *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
1925        }
1926        else
1927            *crop_w -= w_align;
1928    }
1929
1930    #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
1931    unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
1932    if (h_align != 0) {
1933        if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
1934            && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
1935            *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
1936        }
1937        else
1938            *crop_h -= h_align;
1939    }
1940
1941    *crop_x = (src_w - *crop_w) >> 1;
1942    *crop_y = (src_h - *crop_h) >> 1;
1943
1944    if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
1945        *crop_x -= 1;
1946
1947    if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
1948        *crop_y -= 1;
1949
1950    return true;
1951}
1952
1953BayerBufManager::BayerBufManager()
1954{
1955    ALOGV("DEBUG(%s): ", __FUNCTION__);
1956    for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
1957        entries[i].status = BAYER_ON_HAL_EMPTY;
1958        entries[i].reqFrameCnt = 0;
1959    }
1960    sensorEnqueueHead = 0;
1961    sensorDequeueHead = 0;
1962    ispEnqueueHead = 0;
1963    ispDequeueHead = 0;
1964    numOnSensor = 0;
1965    numOnIsp = 0;
1966    numOnHalFilled = 0;
1967    numOnHalEmpty = NUM_BAYER_BUFFERS;
1968}
1969
// Destructor: no resources to release; logs for trace purposes only.
BayerBufManager::~BayerBufManager()
{
    ALOGV("%s", __FUNCTION__);
}
1974
1975int     BayerBufManager::GetIndexForSensorEnqueue()
1976{
1977    int ret = 0;
1978    if (numOnHalEmpty == 0)
1979        ret = -1;
1980    else
1981        ret = sensorEnqueueHead;
1982    ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
1983    return ret;
1984}
1985
1986int    BayerBufManager::MarkSensorEnqueue(int index)
1987{
1988    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
1989
1990    // sanity check
1991    if (index != sensorEnqueueHead) {
1992        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
1993        return -1;
1994    }
1995    if (entries[index].status != BAYER_ON_HAL_EMPTY) {
1996        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
1997            index, entries[index].status, BAYER_ON_HAL_EMPTY);
1998        return -1;
1999    }
2000
2001    entries[index].status = BAYER_ON_SENSOR;
2002    entries[index].reqFrameCnt = 0;
2003    numOnHalEmpty--;
2004    numOnSensor++;
2005    sensorEnqueueHead = GetNextIndex(index);
2006    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2007        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2008    return 0;
2009}
2010
/*
 * Transitions buffer `index` from BAYER_ON_SENSOR to BAYER_ON_HAL_FILLED,
 * updating the on-sensor / HAL-filled counters.
 * NOTE(review): reqFrameCnt and timeStamp are accepted but never used here
 * (entries[index].reqFrameCnt is not updated) -- confirm this is intentional.
 */
int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
{
    ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);

    // Only a buffer previously handed to the sensor can be dequeued from it.
    if (entries[index].status != BAYER_ON_SENSOR) {
        ALOGE("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
            index, entries[index].status, BAYER_ON_SENSOR);
        return -1;
    }

    entries[index].status = BAYER_ON_HAL_FILLED;
    numOnHalFilled++;
    numOnSensor--;

    return 0;
}
2027
2028int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
2029{
2030    int ret = 0;
2031    if (numOnHalFilled == 0)
2032        ret = -1;
2033    else {
2034        *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
2035        ret = ispEnqueueHead;
2036    }
2037    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2038    return ret;
2039}
2040
2041int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
2042{
2043    int ret = 0;
2044    if (numOnIsp == 0)
2045        ret = -1;
2046    else {
2047        *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
2048        ret = ispDequeueHead;
2049    }
2050    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2051    return ret;
2052}
2053
2054int    BayerBufManager::MarkIspEnqueue(int index)
2055{
2056    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
2057
2058    // sanity check
2059    if (index != ispEnqueueHead) {
2060        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
2061        return -1;
2062    }
2063    if (entries[index].status != BAYER_ON_HAL_FILLED) {
2064        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2065            index, entries[index].status, BAYER_ON_HAL_FILLED);
2066        return -1;
2067    }
2068
2069    entries[index].status = BAYER_ON_ISP;
2070    numOnHalFilled--;
2071    numOnIsp++;
2072    ispEnqueueHead = GetNextIndex(index);
2073    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2074        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2075    return 0;
2076}
2077
2078int    BayerBufManager::MarkIspDequeue(int index)
2079{
2080    ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
2081
2082    // sanity check
2083    if (index != ispDequeueHead) {
2084        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
2085        return -1;
2086    }
2087    if (entries[index].status != BAYER_ON_ISP) {
2088        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2089            index, entries[index].status, BAYER_ON_ISP);
2090        return -1;
2091    }
2092
2093    entries[index].status = BAYER_ON_HAL_EMPTY;
2094    entries[index].reqFrameCnt = 0;
2095    numOnHalEmpty++;
2096    numOnIsp--;
2097    ispDequeueHead = GetNextIndex(index);
2098    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2099        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2100    return 0;
2101}
2102
// Returns the number of bayer buffers currently queued on the sensor.
int BayerBufManager::GetNumOnSensor()
{
    return numOnSensor;
}
2107
// Returns the number of filled bayer buffers held by the HAL.
int BayerBufManager::GetNumOnHalFilled()
{
    return numOnHalFilled;
}
2112
// Returns the number of bayer buffers currently queued on the ISP.
int BayerBufManager::GetNumOnIsp()
{
    return numOnIsp;
}
2117
2118int     BayerBufManager::GetNextIndex(int index)
2119{
2120    index++;
2121    if (index >= NUM_BAYER_BUFFERS)
2122        index = 0;
2123
2124    return index;
2125}
2126
2127void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
2128{
2129    camera_metadata_t *currentRequest = NULL;
2130    camera_metadata_t *currentFrame = NULL;
2131    size_t numEntries = 0;
2132    size_t frameSize = 0;
2133    camera_metadata_t * preparedFrame = NULL;
2134    camera_metadata_t *deregisteredRequest = NULL;
2135    uint32_t currentSignal = self->GetProcessingSignal();
2136    MainThread *  selfThread      = ((MainThread*)self);
2137    int res = 0;
2138
2139    int ret;
2140
2141    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
2142
2143    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2144        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2145
2146        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2147        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2148        return;
2149    }
2150
2151    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
2152        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
2153        if (m_requestManager->IsRequestQueueFull()==false) {
2154            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
2155            if (NULL == currentRequest) {
2156                ALOGE("DEBUG(%s)(0x%x): dequeue_request returned NULL ", __FUNCTION__, currentSignal);
2157                m_isRequestQueueNull = true;
2158            }
2159            else {
2160                m_requestManager->RegisterRequest(currentRequest);
2161
2162                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
2163                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
2164                if (m_requestManager->IsRequestQueueFull()==false)
2165                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
2166
2167                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2168            }
2169        }
2170        else {
2171            m_isRequestQueuePending = true;
2172        }
2173    }
2174
2175    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
2176        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
2177        /*while (1)*/ {
2178            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
2179            if (ret == false)
2180                ALOGD("++++++ PrepareFrame ret = %d", ret);
2181
2182            m_requestManager->DeregisterRequest(&deregisteredRequest);
2183
2184            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
2185            if (ret < 0)
2186                ALOGD("++++++ free_request ret = %d", ret);
2187
2188            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
2189            if (ret < 0)
2190                ALOGD("++++++ dequeue_frame ret = %d", ret);
2191
2192            if (currentFrame==NULL) {
2193                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
2194            }
2195            else {
2196                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
2197            }
2198            res = append_camera_metadata(currentFrame, preparedFrame);
2199            if (res==0) {
2200                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
2201                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
2202            }
2203            else {
2204                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
2205            }
2206        }
2207        if (!m_isRequestQueueNull) {
2208            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
2209        }
2210
2211        if (getInProgressCount()>0) {
2212            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
2213            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2214        }
2215    }
2216    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
2217    return;
2218}
2219
// Sensor-thread one-time initialization hook.  Currently a no-op stub
// ("will add" per the original author); logs for trace purposes only.
void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * self)
{
    ALOGV("DEBUG(%s): ", __FUNCTION__ );
    /* will add */
    return;
}
2226
2227
/*
 * Debug helper: logs the key fields of a camera2_shot_ext (magic number,
 * control section, requested output streams, and dynamic metadata) via
 * ALOGD.  Read-only: the shot is not modified.
 */
void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
{
    ALOGD("####  common Section");
    ALOGD("####                 magic(%x) ",
        shot_ext->shot.magicNumber);
    ALOGD("####  ctl Section");
    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
        shot_ext->shot.ctl.request.metadataMode,
        shot_ext->shot.ctl.lens.aperture,
        shot_ext->shot.ctl.sensor.exposureTime,
        shot_ext->shot.ctl.sensor.frameDuration,
        shot_ext->shot.ctl.sensor.sensitivity,
        shot_ext->shot.ctl.aa.awbMode);

    ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) pv(%d) rec(%d)",
        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
        shot_ext->shot.ctl.request.outputStreams[0],
        shot_ext->shot.ctl.request.outputStreams[2]);

    ALOGD("####  DM Section");
    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
        shot_ext->shot.dm.request.metadataMode,
        shot_ext->shot.dm.lens.aperture,
        shot_ext->shot.dm.sensor.exposureTime,
        shot_ext->shot.dm.sensor.frameDuration,
        shot_ext->shot.dm.sensor.sensitivity,
        shot_ext->shot.dm.sensor.timeStamp,
        shot_ext->shot.dm.aa.awbMode,
        shot_ext->shot.dm.request.frameCount );
}
2258
2259void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
2260{
2261    uint32_t        currentSignal = self->GetProcessingSignal();
2262    SensorThread *  selfThread      = ((SensorThread*)self);
2263    int index;
2264    int index_isp;
2265    status_t res;
2266    nsecs_t frameTime;
2267    int bayersOnSensor = 0, bayersOnIsp = 0;
2268    int j = 0;
2269    bool isCapture = false;
2270    ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
2271
2272    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2273        ALOGV("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2274
2275        ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
2276        cam_int_streamoff(&(m_camera_info.sensor));
2277        ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
2278
2279        m_camera_info.sensor.buffers = 0;
2280        ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
2281        cam_int_reqbufs(&(m_camera_info.sensor));
2282        ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
2283
2284        ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
2285        isp_int_streamoff(&(m_camera_info.isp));
2286        ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
2287
2288        m_camera_info.isp.buffers = 0;
2289        ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
2290        cam_int_reqbufs(&(m_camera_info.isp));
2291        ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
2292
2293        exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
2294
2295        ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2296        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2297        return;
2298    }
2299
2300    if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
2301    {
2302        ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
2303        int targetStreamIndex = 0, i=0;
2304        int matchedFrameCnt = -1, processingReqIndex;
2305        struct camera2_shot_ext *shot_ext;
2306        struct camera2_shot_ext *shot_ext_capture;
2307        bool triggered = false;
2308        int afMode;
2309
2310        /* dqbuf from sensor */
2311        ALOGV("Sensor DQbuf start");
2312        index = cam_int_dqbuf(&(m_camera_info.sensor));
2313        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
2314
2315        m_recordOutput = shot_ext->shot.ctl.request.outputStreams[2];
2316
2317        matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
2318
2319        if (matchedFrameCnt != -1) {
2320                frameTime = systemTime();
2321        m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
2322            m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, false);
2323            if (m_IsAfModeUpdateRequired) {
2324                ALOGE("### AF Mode change(Mode %d) ", m_afMode);
2325                shot_ext->shot.ctl.aa.afMode = m_afMode;
2326                if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
2327                    ALOGE("### With Automatic triger for continuous modes");
2328                    m_afState = HAL_AFSTATE_STARTED;
2329                    shot_ext->shot.ctl.aa.afTrigger = 1;
2330                    triggered = true;
2331                }
2332                m_IsAfModeUpdateRequired = false;
2333                if (m_afMode2 != NO_CHANGE) {
2334                    enum aa_afmode tempAfMode = m_afMode2;
2335                    m_afMode2 = NO_CHANGE;
2336                    SetAfMode(tempAfMode);
2337                }
2338            }
2339            else {
2340                shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
2341            }
2342            if (m_IsAfTriggerRequired) {
2343                ALOGE("### AF Triggering with mode (%d)", m_afMode);
2344                if (m_afState == HAL_AFSTATE_SCANNING) {
2345                     ALOGE("(%s): restarting trigger ", __FUNCTION__);
2346                }
2347                else {
2348                    if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
2349                        ALOGE("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
2350                    else
2351                        m_afState = HAL_AFSTATE_STARTED;
2352                }
2353                shot_ext->shot.ctl.aa.afMode = m_afMode;
2354                m_IsAfTriggerRequired = false;
2355            }
2356            else {
2357            }
2358            if (m_wideAspect) {
2359//                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
2360                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
2361                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
2362            }
2363            else {
2364//                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
2365            }
2366            if (triggered)
2367                shot_ext->shot.ctl.aa.afTrigger = 1;
2368
2369            // TODO : check collision with AFMode Update
2370            if (m_IsAfLockRequired) {
2371                shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
2372                m_IsAfLockRequired = false;
2373            }
2374            ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
2375                index,
2376                shot_ext->shot.ctl.request.frameCount,
2377                shot_ext->request_scp,
2378                shot_ext->request_scc,
2379                shot_ext->dis_bypass, sizeof(camera2_shot));
2380
2381            if(shot_ext->request_scc == 1) {
2382                isCapture = true;
2383            }
2384
2385            if(isCapture)
2386            {
2387                for(j = 0; j < m_camera_info.isp.buffers; j++)
2388                {
2389                    shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
2390                    shot_ext_capture->request_scc = 1;
2391                }
2392            }
2393
2394            cam_int_qbuf(&(m_camera_info.isp), index);
2395            //m_ispThread->SetSignal(SIGNAL_ISP_START_BAYER_DEQUEUE);
2396
2397            usleep(10000);
2398            if(isCapture)
2399            {
2400                for(j = 0; j < m_camera_info.isp.buffers; j++)
2401                {
2402                    shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
2403                    ALOGD("shot_ext_capture[%d] scp = %d, scc = %d", j, shot_ext_capture->request_scp, shot_ext_capture->request_scc);
2404//                    DumpInfoWithShot(shot_ext_capture);
2405                }
2406            }
2407
2408
2409            ALOGV("### isp DQBUF start");
2410            index_isp = cam_int_dqbuf(&(m_camera_info.isp));
2411            //m_previewOutput = 0;
2412
2413            if(isCapture)
2414            {
2415                for(j = 0; j < m_camera_info.isp.buffers; j++)
2416                {
2417                    shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
2418                    ALOGD("shot_ext_capture[%d] scp = %d, scc = %d", j, shot_ext_capture->request_scp, shot_ext_capture->request_scc);
2419//                    DumpInfoWithShot(shot_ext_capture);
2420                }
2421            }
2422            shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
2423
2424            ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) shot_size(%d)",
2425                index,
2426                shot_ext->shot.ctl.request.frameCount,
2427                shot_ext->request_scp,
2428                shot_ext->request_scc,
2429                shot_ext->dis_bypass, sizeof(camera2_shot));
2430
2431            if(isCapture) {
2432                    ALOGD("======= request_scc is 1");
2433                    memcpy(&m_jpegMetadata, &shot_ext->shot, sizeof(struct camera2_shot));
2434                    ALOGV("### Saving informationfor jpeg");
2435                    m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
2436
2437                for(j = 0; j < m_camera_info.isp.buffers; j++)
2438                {
2439                    shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
2440                    shot_ext_capture->request_scc = 0;
2441                }
2442
2443                isCapture = false;
2444            }
2445
2446            if (shot_ext->request_scp) {
2447                m_previewOutput = 1;
2448                m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
2449            }
2450
2451            if (shot_ext->request_scc) {
2452                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
2453            }
2454
2455            ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
2456               shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
2457            if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
2458                ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
2459                m_scp_closed = true;
2460            }
2461            else
2462                m_scp_closed = false;
2463
2464            if (!shot_ext->fd_bypass) {
2465                /* FD orientation axis transformation */
2466                for (int i=0; i < CAMERA2_MAX_FACES; i++) {
2467                    if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
2468                        shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW
2469													* shot_ext->shot.dm.stats.faceRectangles[i][0])
2470													/ m_streamThreads[0].get()->m_parameters.outputWidth;
2471                    if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
2472                        shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH
2473													* shot_ext->shot.dm.stats.faceRectangles[i][1])
2474													/ m_streamThreads[0].get()->m_parameters.outputHeight;
2475                    if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
2476                        shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW
2477													* shot_ext->shot.dm.stats.faceRectangles[i][2])
2478													/ m_streamThreads[0].get()->m_parameters.outputWidth;
2479                    if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
2480                        shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH
2481													* shot_ext->shot.dm.stats.faceRectangles[i][3])
2482													/ m_streamThreads[0].get()->m_parameters.outputHeight;
2483                }
2484            }
2485
2486            m_requestManager->ApplyDynamicMetadata(shot_ext);
2487            OnAfNotification(shot_ext->shot.dm.aa.afState);
2488        }
2489
2490        processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]), &afMode);
2491        if (processingReqIndex == -1)
2492        {
2493            ALOGE("DEBUG(%s) req underrun => inserting bubble to BayerIndex(%d)", __FUNCTION__, index);
2494        }
2495        else {
2496            SetAfMode((enum aa_afmode)afMode);
2497        }
2498
2499        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
2500        if (m_scp_closing || m_scp_closed) {
2501            ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
2502            shot_ext->request_scc = 0;
2503            shot_ext->request_scp = 0;
2504            shot_ext->request_sensor = 0;
2505        }
2506
2507//        ALOGD("### Sensor Qbuf start(%d) SCP(%d) SCC(%d) DIS(%d)", index, shot_ext->request_scp, shot_ext->request_scc, shot_ext->dis_bypass);
2508
2509        cam_int_qbuf(&(m_camera_info.sensor), index);
2510        ALOGV("### Sensor QBUF done");
2511
2512        if (!m_closing){
2513            selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2514        }
2515        return;
2516    }
2517    return;
2518}
2519
2520void ExynosCameraHWInterface2::m_ispThreadInitialize(SignalDrivenThread * self)
2521{
2522    ALOGV("DEBUG(%s): ", __FUNCTION__ );
2523    /* will add */
2524    return;
2525}
2526
2527
2528void ExynosCameraHWInterface2::m_ispThreadFunc(SignalDrivenThread * self)
2529{
2530     ALOGV("DEBUG(%s): ", __FUNCTION__ );
2531    /* will add */
2532    return;
2533}
2534
2535void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
2536{
2537    StreamThread *          selfThread      = ((StreamThread*)self);
2538    ALOGV("DEBUG(%s): ", __FUNCTION__ );
2539    memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
2540    selfThread->m_isBufferInit = false;
2541
2542    return;
2543}
2544
2545void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
2546{
2547    uint32_t                currentSignal   = self->GetProcessingSignal();
2548    StreamThread *          selfThread      = ((StreamThread*)self);
2549    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
2550    record_parameters_t     *selfRecordParms =  &(selfThread->m_recordParameters);
2551    node_info_t             *currentNode    = &(selfStreamParms->node);
2552
2553    ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
2554
2555    if (currentSignal & SIGNAL_STREAM_CHANGE_PARAMETER) {
2556        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER", __FUNCTION__);
2557        selfThread->applyChange();
2558        if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT) {
2559            m_resizeBuf.size.extS[0] = ALIGN(selfStreamParms->outputWidth, 16) * ALIGN(selfStreamParms->outputHeight, 16) * 2;
2560            m_resizeBuf.size.extS[1] = 0;
2561            m_resizeBuf.size.extS[2] = 0;
2562
2563            if (allocCameraMemory(selfStreamParms->ionClient, &m_resizeBuf, 1) == -1) {
2564                ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
2565            }
2566        }
2567        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER DONE", __FUNCTION__);
2568    }
2569
2570    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2571        int i, index = -1, cnt_to_dq = 0;
2572        status_t res;
2573        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2574        ALOGD("(%s):(%d) SIGNAL_THREAD_RELEASE", __FUNCTION__, selfStreamParms->streamType);
2575
2576        if (selfThread->m_isBufferInit) {
2577            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
2578                ALOGV("DEBUG(%s): checking buffer index[%d] - status(%d)",
2579                    __FUNCTION__, i, selfStreamParms->svcBufStatus[i]);
2580                if (selfStreamParms->svcBufStatus[i] ==ON_DRIVER) cnt_to_dq++;
2581            }
2582
2583            ALOGV("DEBUG(%s): calling stream(%d) streamoff (fd:%d)", __FUNCTION__,
2584            selfThread->m_index, selfStreamParms->fd);
2585            if (cam_int_streamoff(&(selfStreamParms->node)) < 0 ){
2586                ALOGE("ERR(%s): stream off fail", __FUNCTION__);
2587            } else {
2588                if (selfStreamParms->streamType == STREAM_TYPE_DIRECT) {
2589                    m_scp_closing = true;
2590                } else {
2591                    m_camera_info.capture.status = false;
2592                }
2593            }
2594            ALOGV("DEBUG(%s): calling stream(%d) streamoff done", __FUNCTION__, selfThread->m_index);
2595            ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 (fd:%d)", __FUNCTION__,
2596                    selfThread->m_index, selfStreamParms->fd);
2597            currentNode->buffers = 0;
2598            cam_int_reqbufs(currentNode);
2599            ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 DONE(fd:%d)", __FUNCTION__,
2600                    selfThread->m_index, selfStreamParms->fd);
2601        }
2602        if (selfThread->m_index == 1 && m_resizeBuf.size.s != 0) {
2603            freeCameraMemory(&m_resizeBuf, 1);
2604        }
2605        selfThread->m_isBufferInit = false;
2606        selfThread->m_index = 255;
2607
2608        selfThread->m_releasing = false;
2609
2610        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2611
2612        return;
2613    }
2614
2615    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
2616        buffer_handle_t * buf = NULL;
2617        status_t res;
2618        void *virtAddr[3];
2619        int i, j;
2620        int index;
2621        nsecs_t timestamp;
2622
2623        ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING",
2624            __FUNCTION__,selfThread->m_index);
2625
2626        if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT)
2627        {
2628            ALOGD("stream(%s) processing SIGNAL_STREAM_DATA_COMING",
2629                __FUNCTION__,selfThread->m_index);
2630        }
2631
2632        if (!(selfThread->m_isBufferInit)) {
2633            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
2634                res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
2635                if (res != NO_ERROR || buf == NULL) {
2636                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
2637                    return;
2638                }
2639                ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
2640                   ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2641
2642                if (m_grallocHal->lock(m_grallocHal, *buf,
2643                           selfStreamParms->usage,
2644                           0, 0, selfStreamParms->outputWidth, selfStreamParms->outputHeight, virtAddr) != 0) {
2645                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2646                    return;
2647                }
2648                ALOGV("DEBUG(%s): locked img buf plane0(%x) plane1(%x) plane2(%x)",
2649                __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
2650
2651                index = selfThread->findBufferIndex(virtAddr[0]);
2652                if (index == -1) {
2653                    ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
2654                }
2655                else {
2656                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
2657                        __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
2658                    if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
2659                        selfStreamParms->svcBufStatus[index] = ON_DRIVER;
2660                    else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
2661                        selfStreamParms->svcBufStatus[index] = ON_HAL;
2662                    else {
2663                        ALOGV("DBG(%s): buffer status abnormal (%d) "
2664                            , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
2665                    }
2666                    selfStreamParms->numSvcBufsInHal++;
2667                    if (*buf != selfStreamParms->svcBufHandle[index])
2668                        ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
2669                    else
2670                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
2671                }
2672                selfStreamParms->svcBufIndex = 0;
2673            }
2674            selfThread->m_isBufferInit = true;
2675        }
2676
2677        if (m_recordingEnabled && m_needsRecordBufferInit) {
2678            ALOGV("DEBUG(%s): Recording Buffer Initialization numsvcbuf(%d)",
2679                __FUNCTION__, selfRecordParms->numSvcBuffers);
2680            int checkingIndex = 0;
2681            bool found = false;
2682            for ( i=0 ; i < selfRecordParms->numSvcBuffers; i++) {
2683                res = selfRecordParms->streamOps->dequeue_buffer(selfRecordParms->streamOps, &buf);
2684                if (res != NO_ERROR || buf == NULL) {
2685                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
2686                    return;
2687                }
2688                selfRecordParms->numSvcBufsInHal++;
2689                ALOGV("DEBUG(%s): [record] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
2690                   selfRecordParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2691
2692                if (m_grallocHal->lock(m_grallocHal, *buf,
2693                       selfRecordParms->usage, 0, 0,
2694                       selfRecordParms->outputWidth, selfRecordParms->outputHeight, virtAddr) != 0) {
2695                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2696                }
2697                else {
2698                      ALOGV("DEBUG(%s): [record] locked img buf plane0(%x) plane1(%x) plane2(%x)",
2699                        __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
2700                }
2701                found = false;
2702                for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
2703                    if (selfRecordParms->svcBufHandle[checkingIndex] == *buf ) {
2704                        found = true;
2705                        break;
2706                    }
2707                }
2708                ALOGV("DEBUG(%s): [record] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
2709                if (!found) break;
2710
2711                index = checkingIndex;
2712
2713                if (index == -1) {
2714                    ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
2715                }
2716                else {
2717                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
2718                        __FUNCTION__, index, selfRecordParms->svcBufStatus[index]);
2719                    if (selfRecordParms->svcBufStatus[index]== ON_SERVICE)
2720                        selfRecordParms->svcBufStatus[index] = ON_HAL;
2721                    else {
2722                        ALOGV("DBG(%s): buffer status abnormal (%d) "
2723                            , __FUNCTION__, selfRecordParms->svcBufStatus[index]);
2724                    }
2725                    if (*buf != selfRecordParms->svcBufHandle[index])
2726                        ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
2727                    else
2728                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
2729                }
2730                selfRecordParms->svcBufIndex = 0;
2731            }
2732            m_needsRecordBufferInit = false;
2733        }
2734
2735        do {
2736            if (selfStreamParms->streamType == STREAM_TYPE_DIRECT) {
2737                ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
2738                    selfThread->m_index, selfStreamParms->streamType);
2739
2740                index = cam_int_dqbuf(&(selfStreamParms->node));
2741                ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
2742                    selfThread->m_index, selfStreamParms->streamType, index);
2743
2744
2745                if (selfStreamParms->svcBufStatus[index] !=  ON_DRIVER)
2746                    ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
2747                           __FUNCTION__, selfStreamParms->svcBufStatus[index]);
2748                selfStreamParms->svcBufStatus[index] = ON_HAL;
2749
2750                if (m_recordOutput && m_recordingEnabled) {
2751                    ALOGV("DEBUG(%s): Entering record frame creator, index(%d)",__FUNCTION__, selfRecordParms->svcBufIndex);
2752                    bool found = false;
2753                    for (int i = 0 ; selfRecordParms->numSvcBuffers ; i++) {
2754                        if (selfRecordParms->svcBufStatus[selfRecordParms->svcBufIndex] == ON_HAL) {
2755                            found = true;
2756                            break;
2757                        }
2758                        selfRecordParms->svcBufIndex++;
2759                        if (selfRecordParms->svcBufIndex >= selfRecordParms->numSvcBuffers)
2760                            selfRecordParms->svcBufIndex = 0;
2761                    }
2762                    if (!found) {
2763                        ALOGE("(%s): cannot find free recording buffer", __FUNCTION__);
2764                        selfRecordParms->svcBufIndex++;
2765                        break;
2766                    }
2767
2768                    if (m_exynosVideoCSC) {
2769                        int videoW = selfRecordParms->outputWidth, videoH = selfRecordParms->outputHeight;
2770                        int cropX, cropY, cropW, cropH = 0;
2771                        int previewW = selfStreamParms->outputWidth, previewH = selfStreamParms->outputHeight;
2772                        m_getRatioSize(previewW, previewH,
2773                                       videoW, videoH,
2774                                       &cropX, &cropY,
2775                                       &cropW, &cropH,
2776                                       0);
2777
2778                        ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
2779                                 __FUNCTION__, cropX, cropY, cropW, cropH);
2780
2781                        csc_set_src_format(m_exynosVideoCSC,
2782                                           previewW, previewH,
2783                                           cropX, cropY, cropW, cropH,
2784                                           HAL_PIXEL_FORMAT_EXYNOS_YV12,
2785                                           0);
2786
2787                        csc_set_dst_format(m_exynosVideoCSC,
2788                                           videoW, videoH,
2789                                           0, 0, videoW, videoH,
2790                                           selfRecordParms->outputFormat,
2791                                           1);
2792
2793                        csc_set_src_buffer(m_exynosVideoCSC,
2794                                       (void **)(&(selfStreamParms->svcBuffers[index].fd.fd)));
2795
2796                        csc_set_dst_buffer(m_exynosVideoCSC,
2797                            (void **)(&(selfRecordParms->svcBuffers[selfRecordParms->svcBufIndex].fd.fd)));
2798
2799                        if (csc_convert(m_exynosVideoCSC) != 0) {
2800                            ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
2801                        }
2802                        else {
2803                            ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
2804                        }
2805                    }
2806                    else {
2807                        ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
2808                    }
2809
2810                    res = selfRecordParms->streamOps->enqueue_buffer(selfRecordParms->streamOps,
2811                            systemTime(),
2812                            &(selfRecordParms->svcBufHandle[selfRecordParms->svcBufIndex]));
2813                    ALOGV("DEBUG(%s): stream(%d) record enqueue_buffer to svc done res(%d)", __FUNCTION__,
2814                        selfThread->m_index, res);
2815                    if (res == 0) {
2816                        selfRecordParms->svcBufStatus[selfRecordParms->svcBufIndex] = ON_SERVICE;
2817                        selfRecordParms->numSvcBufsInHal--;
2818                    }
2819                }
2820                if (m_previewOutput && m_requestManager->GetSkipCnt() <= 0) {
2821
2822                    ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
2823                    res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
2824                            m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()),
2825                            &(selfStreamParms->svcBufHandle[index]));
2826
2827                    ALOGV("DEBUG(%s): stream(%d) enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
2828                }
2829                else {
2830                    res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
2831                            &(selfStreamParms->svcBufHandle[index]));
2832                    ALOGV("DEBUG(%s): stream(%d) cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
2833                }
2834                if (res == 0) {
2835                    selfStreamParms->svcBufStatus[index] = ON_SERVICE;
2836                    selfStreamParms->numSvcBufsInHal--;
2837                }
2838                else {
2839                    selfStreamParms->svcBufStatus[index] = ON_HAL;
2840                }
2841            }
2842            else if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT) {
2843                ExynosRect jpegRect;
2844                bool found = false;
2845                bool ret = false;
2846                int pictureW, pictureH, pictureFramesize = 0;
2847                int pictureFormat;
2848                int cropX, cropY, cropW, cropH = 0;
2849                ExynosBuffer resizeBufInfo;
2850                ExynosRect   m_orgPictureRect;
2851
2852                ALOGD("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
2853                    selfThread->m_index, selfStreamParms->streamType);
2854                index = cam_int_dqbuf(&(selfStreamParms->node));
2855                ALOGD("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
2856                    selfThread->m_index, selfStreamParms->streamType, index);
2857
2858
2859                for (int i = 0; i < selfStreamParms->numSvcBuffers ; i++) {
2860                    if (selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] == ON_HAL) {
2861                        found = true;
2862                        break;
2863                    }
2864                    selfStreamParms->svcBufIndex++;
2865                    if (selfStreamParms->svcBufIndex >= selfStreamParms->numSvcBuffers)
2866                        selfStreamParms->svcBufIndex = 0;
2867                }
2868                if (!found) {
2869                    ALOGE("ERR(%s): NO free SVC buffer for JPEG", __FUNCTION__);
2870                    break;
2871                }
2872
2873                m_orgPictureRect.w = selfStreamParms->outputWidth;
2874                m_orgPictureRect.h = selfStreamParms->outputHeight;
2875
2876                ExynosBuffer* m_pictureBuf = &(m_camera_info.capture.buffer[index]);
2877
2878                pictureW = selfStreamParms->nodeWidth;
2879                pictureH = selfStreamParms->nodeHeight;
2880                pictureFormat = V4L2_PIX_FMT_YUYV;
2881                pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
2882
2883                if (m_exynosPictureCSC) {
2884                    m_getRatioSize(pictureW, pictureH,
2885                                   m_orgPictureRect.w, m_orgPictureRect.h,
2886                                   &cropX, &cropY,
2887                                   &cropW, &cropH,
2888                                   0);
2889
2890                    ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
2891                          __FUNCTION__, cropX, cropY, cropW, cropH);
2892
2893                    csc_set_src_format(m_exynosPictureCSC,
2894                                       ALIGN(pictureW, 16), ALIGN(pictureH, 16),
2895                                       cropX, cropY, cropW, cropH,
2896                                       V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
2897                                       0);
2898
2899                    csc_set_dst_format(m_exynosPictureCSC,
2900                                       m_orgPictureRect.w, m_orgPictureRect.h,
2901                                       0, 0, m_orgPictureRect.w, m_orgPictureRect.h,
2902                                       V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
2903                                       0);
2904                    csc_set_src_buffer(m_exynosPictureCSC,
2905                                       (void **)&m_pictureBuf->fd.fd);
2906
2907                    csc_set_dst_buffer(m_exynosPictureCSC,
2908                                       (void **)&m_resizeBuf.fd.fd);
2909                    for (int i = 0 ; i < 3 ; i++)
2910                        ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d",
2911                            __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
2912
2913                    if (csc_convert(m_exynosPictureCSC) != 0)
2914                        ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
2915
2916
2917                }
2918                else {
2919                    ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
2920                }
2921
2922                resizeBufInfo = m_resizeBuf;
2923
2924                m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_orgPictureRect.w, m_orgPictureRect.h, &m_resizeBuf);
2925
2926                for (int i = 1; i < 3; i++) {
2927                    if (m_resizeBuf.size.extS[i] != 0)
2928                        m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
2929
2930                    ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
2931                }
2932
2933                jpegRect.w = m_orgPictureRect.w;
2934                jpegRect.h = m_orgPictureRect.h;
2935                jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
2936
2937                if (yuv2Jpeg(&m_resizeBuf, &selfStreamParms->svcBuffers[selfStreamParms->svcBufIndex], &jpegRect) == false)
2938                    ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
2939                cam_int_qbuf(&(selfStreamParms->node), index);
2940                ALOGV("DEBUG(%s): stream(%d) type(%d) QBUF DONE ",__FUNCTION__,
2941                    selfThread->m_index, selfStreamParms->streamType);
2942
2943                m_resizeBuf = resizeBufInfo;
2944
2945                res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, systemTime(), &(selfStreamParms->svcBufHandle[selfStreamParms->svcBufIndex]));
2946
2947                ALOGV("DEBUG(%s): stream(%d) enqueue_buffer index(%d) to svc done res(%d)",
2948                        __FUNCTION__, selfThread->m_index, selfStreamParms->svcBufIndex, res);
2949                if (res == 0) {
2950                    selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] = ON_SERVICE;
2951                    selfStreamParms->numSvcBufsInHal--;
2952                }
2953                else {
2954                    selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] = ON_HAL;
2955                }
2956            }
2957        }
2958        while (0);
2959
2960        if (selfStreamParms->streamType == STREAM_TYPE_DIRECT  && m_recordOutput && m_recordingEnabled) {
2961            do {
2962                ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , selfRecordParms->numSvcBufsInHal);
2963                if (selfRecordParms->numSvcBufsInHal >= 1)
2964                {
2965                    ALOGV("DEBUG(%s): breaking", __FUNCTION__);
2966                    break;
2967                }
2968                res = selfRecordParms->streamOps->dequeue_buffer(selfRecordParms->streamOps, &buf);
2969                if (res != NO_ERROR || buf == NULL) {
2970                    ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
2971                    break;
2972                }
2973                selfRecordParms->numSvcBufsInHal ++;
2974                ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
2975                   selfRecordParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2976
2977                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
2978                bool found = false;
2979                int checkingIndex = 0;
2980                for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
2981                    if (priv_handle->fd == selfRecordParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
2982                        found = true;
2983                        break;
2984                    }
2985                }
2986                ALOGV("DEBUG(%s): recording dequeueed_buffer found index(%d)", __FUNCTION__, found);
2987
2988                if (!found) {
2989                     break;
2990                }
2991
2992                index = checkingIndex;
2993                if (selfRecordParms->svcBufStatus[index] == ON_SERVICE) {
2994                    selfRecordParms->svcBufStatus[index] = ON_HAL;
2995                }
2996                else {
2997                    ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
2998                        index,  selfRecordParms->svcBufStatus[index]);
2999                }
3000            } while (0);
3001        }
3002        if (selfStreamParms->streamType == STREAM_TYPE_DIRECT) {
3003            while (selfStreamParms->numSvcBufsInHal < selfStreamParms->numOwnSvcBuffers) {
3004                res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3005                if (res != NO_ERROR || buf == NULL) {
3006                    ALOGV("DEBUG(%s): stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
3007                    break;
3008                }
3009                selfStreamParms->numSvcBufsInHal++;
3010                ALOGV("DEBUG(%s): stream(%d) got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3011                    selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3012                   ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3013                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3014
3015                bool found = false;
3016                int checkingIndex = 0;
3017                for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3018                    if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3019                        found = true;
3020                        break;
3021                    }
3022                }
3023                ALOGV("DEBUG(%s): post_dequeue_buffer found(%d)", __FUNCTION__, found);
3024                if (!found) break;
3025                ALOGV("DEBUG(%s): preparing to qbuf [%d]", __FUNCTION__, checkingIndex);
3026                index = checkingIndex;
3027                if (index < selfStreamParms->numHwBuffers) {
3028                    uint32_t    plane_index = 0;
3029                    ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[index]);
3030                    struct v4l2_buffer v4l2_buf;
3031                    struct v4l2_plane  planes[VIDEO_MAX_PLANES];
3032
3033                    v4l2_buf.m.planes   = planes;
3034                    v4l2_buf.type       = currentNode->type;
3035                    v4l2_buf.memory     = currentNode->memory;
3036                    v4l2_buf.index      = index;
3037                    v4l2_buf.length     = currentNode->planes;
3038
3039                    v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
3040                    v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
3041                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
3042                    for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
3043                        v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
3044                        ALOGV("DEBUG(%s): plane(%d): fd(%d)  length(%d)",
3045                             __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
3046                             v4l2_buf.m.planes[plane_index].length);
3047                    }
3048                    if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
3049                        ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail",
3050                            __FUNCTION__, selfThread->m_index);
3051                        return;
3052                    }
3053                    selfStreamParms->svcBufStatus[index] = ON_DRIVER;
3054                    ALOGV("DEBUG(%s): stream id(%d) type0 QBUF done index(%d)",
3055                        __FUNCTION__, selfThread->m_index, index);
3056                }
3057            }
3058        }
3059        else if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT) {
3060            while (selfStreamParms->numSvcBufsInHal < selfStreamParms->numOwnSvcBuffers) {
3061                res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3062                if (res != NO_ERROR || buf == NULL) {
3063                    ALOGV("DEBUG(%s): stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
3064                    break;
3065                }
3066
3067                ALOGV("DEBUG(%s): stream(%d) got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3068                    selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3069                   ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3070
3071                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3072
3073                bool found = false;
3074                int checkingIndex = 0;
3075                for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3076                    if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3077                        found = true;
3078                        break;
3079                    }
3080                }
3081                if (!found) break;
3082                selfStreamParms->svcBufStatus[checkingIndex] = ON_HAL;
3083                selfStreamParms->numSvcBufsInHal++;
3084            }
3085
3086        }
3087        ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING DONE",
3088            __FUNCTION__,selfThread->m_index);
3089    }
3090    return;
3091}
3092
3093bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
3094                            ExynosBuffer *jpegBuf,
3095                            ExynosRect *rect)
3096{
3097    unsigned char *addr;
3098
3099    int thumbW = 320;
3100    int thumbH = 240;
3101
3102    ExynosJpegEncoderForCamera jpegEnc;
3103    bool ret = false;
3104    int res = 0;
3105
3106    unsigned int *yuvSize = yuvBuf->size.extS;
3107
3108    if (jpegEnc.create()) {
3109        ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
3110        goto jpeg_encode_done;
3111    }
3112
3113    if (jpegEnc.setQuality(100)) {
3114        ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
3115        goto jpeg_encode_done;
3116    }
3117
3118    if (jpegEnc.setSize(rect->w, rect->h)) {
3119        ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
3120        goto jpeg_encode_done;
3121    }
3122    ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
3123
3124    if (jpegEnc.setColorFormat(rect->colorFormat)) {
3125        ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
3126        goto jpeg_encode_done;
3127    }
3128
3129    if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
3130        ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
3131        goto jpeg_encode_done;
3132    }
3133
3134    mExifInfo.enableThumb = true;
3135
3136    if (jpegEnc.setThumbnailSize(thumbW, thumbH)) {
3137        ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, thumbW, thumbH);
3138        goto jpeg_encode_done;
3139    }
3140
3141    ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, thumbW, thumbH);
3142    if (jpegEnc.setThumbnailQuality(50)) {
3143        ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
3144        goto jpeg_encode_done;
3145    }
3146
3147    m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
3148    ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
3149    if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
3150        ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
3151        goto jpeg_encode_done;
3152    }
3153    if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
3154        ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
3155        goto jpeg_encode_done;
3156    }
3157
3158    if (jpegEnc.updateConfig()) {
3159        ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
3160        goto jpeg_encode_done;
3161    }
3162
3163    if (res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) {
3164        ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
3165        goto jpeg_encode_done;
3166    }
3167
3168    ret = true;
3169
3170jpeg_encode_done:
3171
3172    if (jpegEnc.flagCreate() == true)
3173        jpegEnc.destroy();
3174
3175    return ret;
3176}
3177
3178
3179void ExynosCameraHWInterface2::OnAfTrigger(int id)
3180{
3181    switch (m_afMode) {
3182    case AA_AFMODE_AUTO:
3183    case AA_AFMODE_MACRO:
3184        OnAfTriggerAutoMacro(id);
3185        break;
3186    case AA_AFMODE_CONTINUOUS_VIDEO:
3187        OnAfTriggerCAFVideo(id);
3188        break;
3189    case AA_AFMODE_CONTINUOUS_PICTURE:
3190        OnAfTriggerCAFPicture(id);
3191        break;
3192    case AA_AFMODE_OFF:
3193    default:
3194        break;
3195    }
3196}
3197
3198void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id)
3199{
3200    int nextState = NO_TRANSITION;
3201    m_afTriggerId = id;
3202
3203    switch (m_afState) {
3204    case HAL_AFSTATE_INACTIVE:
3205        nextState = HAL_AFSTATE_NEEDS_COMMAND;
3206        m_IsAfTriggerRequired = true;
3207        break;
3208    case HAL_AFSTATE_NEEDS_COMMAND:
3209        nextState = NO_TRANSITION;
3210        break;
3211    case HAL_AFSTATE_STARTED:
3212        nextState = NO_TRANSITION;
3213        break;
3214    case HAL_AFSTATE_SCANNING:
3215        nextState = NO_TRANSITION;
3216        break;
3217    case HAL_AFSTATE_LOCKED:
3218        nextState = HAL_AFSTATE_NEEDS_COMMAND;
3219        m_IsAfTriggerRequired = true;
3220        break;
3221    case HAL_AFSTATE_FAILED:
3222        nextState = HAL_AFSTATE_NEEDS_COMMAND;
3223        m_IsAfTriggerRequired = true;
3224        break;
3225    default:
3226        break;
3227    }
3228    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
3229    if (nextState != NO_TRANSITION)
3230        m_afState = nextState;
3231}
3232
3233void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
3234{
3235    int nextState = NO_TRANSITION;
3236    m_afTriggerId = id;
3237
3238    switch (m_afState) {
3239    case HAL_AFSTATE_INACTIVE:
3240        nextState = HAL_AFSTATE_FAILED;
3241        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3242        break;
3243    case HAL_AFSTATE_NEEDS_COMMAND:
3244        // not used
3245        break;
3246    case HAL_AFSTATE_STARTED:
3247        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
3248        m_AfHwStateFailed = false;
3249        break;
3250    case HAL_AFSTATE_SCANNING:
3251        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
3252        m_AfHwStateFailed = false;
3253        break;
3254    case HAL_AFSTATE_NEEDS_DETERMINATION:
3255        nextState = NO_TRANSITION;
3256        break;
3257    case HAL_AFSTATE_PASSIVE_FOCUSED:
3258        m_IsAfLockRequired = true;
3259        if (m_AfHwStateFailed) {
3260            ALOGV("(%s): LAST : fail", __FUNCTION__);
3261            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3262            nextState = HAL_AFSTATE_FAILED;
3263        }
3264        else {
3265            ALOGV("(%s): LAST : success", __FUNCTION__);
3266            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
3267            nextState = HAL_AFSTATE_LOCKED;
3268        }
3269        m_AfHwStateFailed = false;
3270        break;
3271    case HAL_AFSTATE_LOCKED:
3272        nextState = NO_TRANSITION;
3273        break;
3274    case HAL_AFSTATE_FAILED:
3275        nextState = NO_TRANSITION;
3276        break;
3277    default:
3278        break;
3279    }
3280    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
3281    if (nextState != NO_TRANSITION)
3282        m_afState = nextState;
3283}
3284
3285
3286void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id)
3287{
3288    int nextState = NO_TRANSITION;
3289    m_afTriggerId = id;
3290
3291    switch (m_afState) {
3292    case HAL_AFSTATE_INACTIVE:
3293        nextState = HAL_AFSTATE_FAILED;
3294        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3295        break;
3296    case HAL_AFSTATE_NEEDS_COMMAND:
3297        // not used
3298        break;
3299    case HAL_AFSTATE_STARTED:
3300        m_IsAfLockRequired = true;
3301        nextState = HAL_AFSTATE_FAILED;
3302        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3303        break;
3304    case HAL_AFSTATE_SCANNING:
3305        m_IsAfLockRequired = true;
3306        nextState = HAL_AFSTATE_FAILED;
3307        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
3308        break;
3309    case HAL_AFSTATE_NEEDS_DETERMINATION:
3310        // not used
3311        break;
3312    case HAL_AFSTATE_PASSIVE_FOCUSED:
3313        m_IsAfLockRequired = true;
3314        SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
3315        nextState = HAL_AFSTATE_LOCKED;
3316        break;
3317    case HAL_AFSTATE_LOCKED:
3318        nextState = NO_TRANSITION;
3319        break;
3320    case HAL_AFSTATE_FAILED:
3321        nextState = NO_TRANSITION;
3322        break;
3323    default:
3324        break;
3325    }
3326    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
3327    if (nextState != NO_TRANSITION)
3328        m_afState = nextState;
3329}
3330
3331void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
3332{
3333    switch (m_afMode) {
3334    case AA_AFMODE_AUTO:
3335    case AA_AFMODE_MACRO:
3336        OnAfNotificationAutoMacro(noti);
3337        break;
3338    case AA_AFMODE_CONTINUOUS_VIDEO:
3339        OnAfNotificationCAFVideo(noti);
3340        break;
3341    case AA_AFMODE_CONTINUOUS_PICTURE:
3342        OnAfNotificationCAFPicture(noti);
3343        break;
3344    case AA_AFMODE_OFF:
3345    default:
3346        break;
3347    }
3348}
3349
void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
{
    // AF state machine for AUTO/MACRO modes: advances m_afState according to
    // the hardware AF state (noti) reported by the ISP, and notifies the
    // service of user-visible transitions. A hardware transition that is not
    // legal for the current HAL state is logged and ignored.
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;

    // Idle or waiting for the start command: all notifications are ignored.
    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    // Start command sent: wait for the ISP to begin the active scan.
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // Scan has begun: surface ACTIVE_SCAN to the framework.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    // Scan in progress: wait for a focused/failed verdict from the ISP.
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            // Sweep succeeded: report FOCUSED_LOCKED.
            nextState = HAL_AFSTATE_LOCKED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // Sweep failed: report NOT_FOCUSED_LOCKED.
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    // Locked: only a repeat of ACQUIRED_FOCUS is acceptable from hardware.
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        switch (noti) {
            case AA_AFSTATE_INACTIVE:
            case AA_AFSTATE_ACTIVE_SCAN:
                bWrongTransition = true;
                break;
            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
                nextState = NO_TRANSITION;
                break;
            case AA_AFSTATE_AF_FAILED_FOCUS:
            default:
                bWrongTransition = true;
                break;
        }
    }
    // Failed: only a repeat of FAILED_FOCUS is acceptable from hardware.
    else if (m_afState == HAL_AFSTATE_FAILED) {
        switch (noti) {
            case AA_AFSTATE_INACTIVE:
            case AA_AFSTATE_ACTIVE_SCAN:
            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
                bWrongTransition = true;
                break;
            case AA_AFSTATE_AF_FAILED_FOCUS:
                nextState = NO_TRANSITION;
                break;
            default:
                bWrongTransition = true;
                break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
3445
void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
{
    // AF state machine for continuous-picture mode. The passive scan runs
    // continuously; its latest outcome is cached in m_AfHwStateFailed so that
    // a later trigger (OnAfTriggerCAFPicture) can lock with the right result.
    // Illegal hardware transitions are logged and ignored.
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;

    // Inactive: all notifications are ignored.
    if (m_afState == HAL_AFSTATE_INACTIVE) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    // Passive scan started: follow the ISP into scanning/focused/failed.
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    // Scanning: record the sweep's outcome; note a failed sweep still moves
    // to PASSIVE_FOCUSED (framework-visible), with the failure remembered in
    // m_AfHwStateFailed for the eventual trigger.
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            m_AfHwStateFailed = false;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            m_AfHwStateFailed = false;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            m_AfHwStateFailed = true;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    // Passive-focused: the scene may change, restarting the scan or updating
    // the cached success/failure flag.
    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = HAL_AFSTATE_SCANNING;
            m_AfHwStateFailed = false;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            m_AfHwStateFailed = false;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            m_AfHwStateFailed = true;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    // A trigger arrived mid-scan: the next focused/failed verdict decides the
    // lock outcome immediately.
    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            m_IsAfLockRequired = true;
            nextState = HAL_AFSTATE_LOCKED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            m_IsAfLockRequired = true;
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    // Locked: lens is held; only INACTIVE or a repeated ACQUIRED_FOCUS is ok.
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        switch (noti) {
            case AA_AFSTATE_INACTIVE:
                nextState = NO_TRANSITION;
                break;
            case AA_AFSTATE_ACTIVE_SCAN:
                bWrongTransition = true;
                break;
            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
                nextState = NO_TRANSITION;
                break;
            case AA_AFSTATE_AF_FAILED_FOCUS:
            default:
                bWrongTransition = true;
                break;
        }
    }
    // Failed: a new ACTIVE_SCAN may resume scanning.
    else if (m_afState == HAL_AFSTATE_FAILED) {
        switch (noti) {
            case AA_AFSTATE_INACTIVE:
                bWrongTransition = true;
                break;
            case AA_AFSTATE_ACTIVE_SCAN:
                nextState = HAL_AFSTATE_SCANNING;
                break;
            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
                bWrongTransition = true;
                break;
            case AA_AFSTATE_AF_FAILED_FOCUS:
                nextState = NO_TRANSITION;
                break;
            default:
                bWrongTransition = true;
                break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
3598
void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
{
    // AF state machine for continuous-video mode. Similar to the CAF-picture
    // machine but with different legality rules (video never defers a failed
    // sweep: it re-triggers instead). Illegal hardware transitions are logged
    // and ignored.
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;

    // Inactive: all notifications are ignored.
    if (m_afState == HAL_AFSTATE_INACTIVE) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    // Passive scan started: follow the ISP into scanning/focused/failed.
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    // Scanning: success settles to PASSIVE_FOCUSED; failure requests a fresh
    // AF trigger (m_IsAfTriggerRequired) instead of reporting a failure.
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            m_IsAfTriggerRequired = true;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    // Passive-focused: a scene change restarts the scan; losing focus is a
    // reported failure in video mode.
    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    // Awaiting determination after a trigger: the next verdict decides the
    // lock outcome (lock only requested on success here).
    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            m_IsAfLockRequired = true;
            nextState = HAL_AFSTATE_LOCKED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    // Locked: lens is held; only INACTIVE or repeated ACQUIRED_FOCUS is ok.
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        switch (noti) {
            case AA_AFSTATE_INACTIVE:
                nextState = NO_TRANSITION;
                break;
            case AA_AFSTATE_ACTIVE_SCAN:
                bWrongTransition = true;
                break;
            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
                nextState = NO_TRANSITION;
                break;
            case AA_AFSTATE_AF_FAILED_FOCUS:
            default:
                bWrongTransition = true;
                break;
        }
    }
    // Failed: only a repeat of FAILED_FOCUS is acceptable from hardware.
    else if (m_afState == HAL_AFSTATE_FAILED) {
        switch (noti) {
            case AA_AFSTATE_INACTIVE:
            case AA_AFSTATE_ACTIVE_SCAN:
            case AA_AFSTATE_AF_ACQUIRED_FOCUS:
                bWrongTransition = true;
                break;
            case AA_AFSTATE_AF_FAILED_FOCUS:
                nextState = NO_TRANSITION;
                break;
            default:
                bWrongTransition = true;
                break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
3741
3742void ExynosCameraHWInterface2::OnAfCancel(int id)
3743{
3744    switch (m_afMode) {
3745    case AA_AFMODE_AUTO:
3746    case AA_AFMODE_MACRO:
3747        OnAfCancelAutoMacro(id);
3748        break;
3749    case AA_AFMODE_CONTINUOUS_VIDEO:
3750        OnAfCancelCAFVideo(id);
3751        break;
3752    case AA_AFMODE_CONTINUOUS_PICTURE:
3753        OnAfCancelCAFPicture(id);
3754        break;
3755    case AA_AFMODE_OFF:
3756    default:
3757        break;
3758    }
3759}
3760
3761void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id)
3762{
3763    int nextState = NO_TRANSITION;
3764    m_afTriggerId = id;
3765
3766    switch (m_afState) {
3767    case HAL_AFSTATE_INACTIVE:
3768        nextState = NO_TRANSITION;
3769        break;
3770    case HAL_AFSTATE_NEEDS_COMMAND:
3771    case HAL_AFSTATE_STARTED:
3772    case HAL_AFSTATE_SCANNING:
3773    case HAL_AFSTATE_LOCKED:
3774    case HAL_AFSTATE_FAILED:
3775        SetAfMode(AA_AFMODE_OFF);
3776        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
3777        nextState = HAL_AFSTATE_INACTIVE;
3778        break;
3779    default:
3780        break;
3781    }
3782    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
3783    if (nextState != NO_TRANSITION)
3784        m_afState = nextState;
3785}
3786
3787void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id)
3788{
3789    int nextState = NO_TRANSITION;
3790    m_afTriggerId = id;
3791
3792    switch (m_afState) {
3793    case HAL_AFSTATE_INACTIVE:
3794        nextState = NO_TRANSITION;
3795        break;
3796    case HAL_AFSTATE_NEEDS_COMMAND:
3797    case HAL_AFSTATE_STARTED:
3798    case HAL_AFSTATE_SCANNING:
3799    case HAL_AFSTATE_LOCKED:
3800    case HAL_AFSTATE_FAILED:
3801    case HAL_AFSTATE_NEEDS_DETERMINATION:
3802    case HAL_AFSTATE_PASSIVE_FOCUSED:
3803        SetAfMode(AA_AFMODE_OFF);
3804        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
3805        SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
3806        nextState = HAL_AFSTATE_INACTIVE;
3807        break;
3808    default:
3809        break;
3810    }
3811    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
3812    if (nextState != NO_TRANSITION)
3813        m_afState = nextState;
3814}
3815
3816void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id)
3817{
3818    int nextState = NO_TRANSITION;
3819    m_afTriggerId = id;
3820
3821    switch (m_afState) {
3822    case HAL_AFSTATE_INACTIVE:
3823        nextState = NO_TRANSITION;
3824        break;
3825    case HAL_AFSTATE_NEEDS_COMMAND:
3826    case HAL_AFSTATE_STARTED:
3827    case HAL_AFSTATE_SCANNING:
3828    case HAL_AFSTATE_LOCKED:
3829    case HAL_AFSTATE_FAILED:
3830    case HAL_AFSTATE_NEEDS_DETERMINATION:
3831    case HAL_AFSTATE_PASSIVE_FOCUSED:
3832        SetAfMode(AA_AFMODE_OFF);
3833        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
3834        SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
3835        nextState = HAL_AFSTATE_INACTIVE;
3836        break;
3837    default:
3838        break;
3839    }
3840    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
3841    if (nextState != NO_TRANSITION)
3842        m_afState = nextState;
3843}
3844
void ExynosCameraHWInterface2::SetAfStateForService(int newState)
{
    // Record the AF state reported to the camera service and immediately
    // push a CAMERA2_MSG_AUTOFOCUS notification carrying the trigger id of
    // the request that caused the transition. State is stored before the
    // callback so the service sees a consistent value if it reads back.
    m_serviceAfState = newState;
    m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
}
3850
int ExynosCameraHWInterface2::GetAfStateForService()
{
   // Returns the AF state most recently reported to the camera service.
   return m_serviceAfState;
}
3855
3856void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
3857{
3858    if (m_afMode != afMode) {
3859        if (m_IsAfModeUpdateRequired) {
3860            m_afMode2 = afMode;
3861            ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
3862        }
3863        else {
3864            ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
3865            m_IsAfModeUpdateRequired = true;
3866            m_afMode = afMode;
3867        }
3868    }
3869}
3870
3871void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
3872{
3873    char property[PROPERTY_VALUE_MAX];
3874
3875    //2 0th IFD TIFF Tags
3876#if 0 // STOPSHIP TODO(aray): remove before launch, but for now don't leak product data
3877    //3 Maker
3878    property_get("ro.product.brand", property, EXIF_DEF_MAKER);
3879    strncpy((char *)mExifInfo.maker, property,
3880                sizeof(mExifInfo.maker) - 1);
3881    mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
3882    //3 Model
3883    property_get("ro.product.model", property, EXIF_DEF_MODEL);
3884    strncpy((char *)mExifInfo.model, property,
3885                sizeof(mExifInfo.model) - 1);
3886    mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
3887    //3 Software
3888    property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
3889    strncpy((char *)mExifInfo.software, property,
3890                sizeof(mExifInfo.software) - 1);
3891    mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
3892#endif
3893
3894    //3 YCbCr Positioning
3895    mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
3896
3897    //2 0th IFD Exif Private Tags
3898    //3 F Number
3899    mExifInfo.fnumber.num = EXIF_DEF_FNUMBER_NUM;
3900    mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
3901    //3 Exposure Program
3902    mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
3903    //3 Exif Version
3904    memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
3905    //3 Aperture
3906    uint32_t av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
3907    mExifInfo.aperture.num = av*EXIF_DEF_APEX_DEN;
3908    mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
3909    //3 Maximum lens aperture
3910    mExifInfo.max_aperture.num = mExifInfo.aperture.num;
3911    mExifInfo.max_aperture.den = mExifInfo.aperture.den;
3912    //3 Lens Focal Length
3913    mExifInfo.focal_length.num = EXIF_DEF_FOCAL_LEN_NUM;
3914    mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
3915    //3 User Comments
3916    strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
3917    //3 Color Space information
3918    mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
3919    //3 Exposure Mode
3920    mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
3921
3922    //2 0th IFD GPS Info Tags
3923    unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
3924    memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
3925
3926    //2 1th IFD TIFF Tags
3927    mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
3928    mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
3929    mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
3930    mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
3931    mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
3932    mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
3933}
3934
3935void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
3936	camera2_shot *currentEntry)
3937{
3938    camera2_dm *dm = &(currentEntry->dm);
3939    camera2_ctl *ctl = &(currentEntry->ctl);
3940
3941    ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime,dm->aa.isoValue );
3942    if (!ctl->request.frameCount)
3943       return;
3944    //2 0th IFD TIFF Tags
3945    //3 Width
3946    exifInfo->width = rect->w;
3947    //3 Height
3948    exifInfo->height = rect->h;
3949    //3 Orientation
3950    switch (ctl->jpeg.orientation) {
3951    case 90:
3952        exifInfo->orientation = EXIF_ORIENTATION_90;
3953        break;
3954    case 180:
3955        exifInfo->orientation = EXIF_ORIENTATION_180;
3956        break;
3957    case 270:
3958        exifInfo->orientation = EXIF_ORIENTATION_270;
3959        break;
3960    case 0:
3961    default:
3962        exifInfo->orientation = EXIF_ORIENTATION_UP;
3963        break;
3964    }
3965
3966    //3 Date time
3967    time_t rawtime;
3968    struct tm *timeinfo;
3969    time(&rawtime);
3970    timeinfo = localtime(&rawtime);
3971    strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
3972
3973    //2 0th IFD Exif Private Tags
3974    //3 Exposure Time
3975    int shutterSpeed = (dm->sensor.exposureTime/1000);
3976
3977    if (shutterSpeed < 0) {
3978        shutterSpeed = 100;
3979    }
3980
3981    exifInfo->exposure_time.num = 1;
3982    // x us -> 1/x s */
3983    //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
3984    exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
3985
3986    //3 ISO Speed Rating
3987    exifInfo->iso_speed_rating = dm->aa.isoValue;
3988
3989    uint32_t av, tv, bv, sv, ev;
3990    av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
3991    tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
3992    sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
3993    bv = av + tv - sv;
3994    ev = av + tv;
3995    //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
3996    ALOGD("AV=%d, TV=%d, SV=%d", av, tv, sv);
3997
3998    //3 Shutter Speed
3999    exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
4000    exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
4001    //3 Brightness
4002    exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
4003    exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
4004    //3 Exposure Bias
4005    if (ctl->aa.sceneMode== AA_SCENE_MODE_BEACH||
4006        ctl->aa.sceneMode== AA_SCENE_MODE_SNOW) {
4007        exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
4008        exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
4009    } else {
4010        exifInfo->exposure_bias.num = 0;
4011        exifInfo->exposure_bias.den = 0;
4012    }
4013    //3 Metering Mode
4014    /*switch (m_curCameraInfo->metering) {
4015    case METERING_MODE_CENTER:
4016        exifInfo->metering_mode = EXIF_METERING_CENTER;
4017        break;
4018    case METERING_MODE_MATRIX:
4019        exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
4020        break;
4021    case METERING_MODE_SPOT:
4022        exifInfo->metering_mode = EXIF_METERING_SPOT;
4023        break;
4024    case METERING_MODE_AVERAGE:
4025    default:
4026        exifInfo->metering_mode = EXIF_METERING_AVERAGE;
4027        break;
4028    }*/
4029    exifInfo->metering_mode = EXIF_METERING_CENTER;
4030
4031    //3 Flash
4032    int flash = dm->flash.flashMode;
4033    if (dm->flash.flashMode == FLASH_MODE_OFF || flash < 0)
4034        exifInfo->flash = EXIF_DEF_FLASH;
4035    else
4036        exifInfo->flash = flash;
4037
4038    //3 White Balance
4039    if (dm->aa.awbMode == AA_AWBMODE_WB_AUTO)
4040        exifInfo->white_balance = EXIF_WB_AUTO;
4041    else
4042        exifInfo->white_balance = EXIF_WB_MANUAL;
4043
4044    //3 Scene Capture Type
4045    switch (ctl->aa.sceneMode) {
4046    case AA_SCENE_MODE_PORTRAIT:
4047        exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
4048        break;
4049    case AA_SCENE_MODE_LANDSCAPE:
4050        exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
4051        break;
4052    case AA_SCENE_MODE_NIGHT_PORTRAIT:
4053        exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
4054        break;
4055    default:
4056        exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
4057        break;
4058    }
4059
4060    //2 0th IFD GPS Info Tags
4061    if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
4062
4063        if (ctl->jpeg.gpsCoordinates[0] > 0)
4064            strcpy((char *)exifInfo->gps_latitude_ref, "N");
4065        else
4066            strcpy((char *)exifInfo->gps_latitude_ref, "S");
4067
4068        if (ctl->jpeg.gpsCoordinates[1] > 0)
4069            strcpy((char *)exifInfo->gps_longitude_ref, "E");
4070        else
4071            strcpy((char *)exifInfo->gps_longitude_ref, "W");
4072
4073        if (ctl->jpeg.gpsCoordinates[2] > 0)
4074            exifInfo->gps_altitude_ref = 0;
4075        else
4076            exifInfo->gps_altitude_ref = 1;
4077
4078        double latitude = fabs(ctl->jpeg.gpsCoordinates[0] / 10000.0);
4079        double longitude = fabs(ctl->jpeg.gpsCoordinates[1] / 10000.0);
4080        double altitude = fabs(ctl->jpeg.gpsCoordinates[2] / 100.0);
4081
4082        exifInfo->gps_latitude[0].num = (uint32_t)latitude;
4083        exifInfo->gps_latitude[0].den = 1;
4084        exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
4085        exifInfo->gps_latitude[1].den = 1;
4086        exifInfo->gps_latitude[2].num = (uint32_t)((((latitude - exifInfo->gps_latitude[0].num) * 60)
4087                                        - exifInfo->gps_latitude[1].num) * 60);
4088        exifInfo->gps_latitude[2].den = 1;
4089
4090        exifInfo->gps_longitude[0].num = (uint32_t)longitude;
4091        exifInfo->gps_longitude[0].den = 1;
4092        exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
4093        exifInfo->gps_longitude[1].den = 1;
4094        exifInfo->gps_longitude[2].num = (uint32_t)((((longitude - exifInfo->gps_longitude[0].num) * 60)
4095                                        - exifInfo->gps_longitude[1].num) * 60);
4096        exifInfo->gps_longitude[2].den = 1;
4097
4098        exifInfo->gps_altitude.num = (uint32_t)altitude;
4099        exifInfo->gps_altitude.den = 1;
4100
4101        struct tm tm_data;
4102        long timestamp;
4103        timestamp = (long)ctl->jpeg.gpsTimestamp;
4104        gmtime_r(&timestamp, &tm_data);
4105        exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
4106        exifInfo->gps_timestamp[0].den = 1;
4107        exifInfo->gps_timestamp[1].num = tm_data.tm_min;
4108        exifInfo->gps_timestamp[1].den = 1;
4109        exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
4110        exifInfo->gps_timestamp[2].den = 1;
4111        snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
4112                "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
4113
4114        exifInfo->enableGps = true;
4115    } else {
4116        exifInfo->enableGps = false;
4117    }
4118
4119    //2 1th IFD TIFF Tags
4120    exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
4121    exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
4122}
4123
// MainThread destructor: logging only; actual teardown is signal-driven
// via release().
ExynosCameraHWInterface2::MainThread::~MainThread()
{
    ALOGV("(%s):", __FUNCTION__);
}

// Request asynchronous thread exit by posting SIGNAL_THREAD_RELEASE;
// returns immediately without waiting for the thread to stop.
void ExynosCameraHWInterface2::MainThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
4134
// SensorThread destructor: logging only; teardown is signal-driven via
// release().
ExynosCameraHWInterface2::SensorThread::~SensorThread()
{
    ALOGV("(%s):", __FUNCTION__);
}

// Request asynchronous thread exit by posting SIGNAL_THREAD_RELEASE;
// does not block for completion.
void ExynosCameraHWInterface2::SensorThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
4145
// IspThread destructor: logging only; teardown is signal-driven via
// release().
ExynosCameraHWInterface2::IspThread::~IspThread()
{
    ALOGV("(%s):", __FUNCTION__);
}

// Request asynchronous thread exit by posting SIGNAL_THREAD_RELEASE;
// does not block for completion.
void ExynosCameraHWInterface2::IspThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
4156
// StreamThread destructor: logging only; teardown is signal-driven via
// release().
ExynosCameraHWInterface2::StreamThread::~StreamThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
4161
// Stage new stream parameters and signal the stream thread to apply them
// (see applyChange()).
// NOTE(review): the usleep() only approximates a synchronous hand-off;
// nothing guarantees the thread has consumed m_tempParameters before the
// caller's buffer goes out of scope — see the TODO below.
void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);

    // Pointer is read by applyChange() on the stream thread.
    m_tempParameters = new_parameters;

    SetSignal(SIGNAL_STREAM_CHANGE_PARAMETER);

    // TODO : return synchronously (after setting parameters asynchronously)
    usleep(2000);
}
4173
// Commit the parameters staged by setParameter() into the active
// parameter block; runs on the stream thread in response to
// SIGNAL_STREAM_CHANGE_PARAMETER.
void ExynosCameraHWInterface2::StreamThread::applyChange()
{
    memcpy(&m_parameters, m_tempParameters, sizeof(stream_parameters_t));

    ALOGV("DEBUG(%s):  Applying Stream paremeters  width(%d), height(%d)",
            __FUNCTION__, m_parameters.outputWidth, m_parameters.outputHeight);
}
4181
// Request asynchronous thread exit by posting SIGNAL_THREAD_RELEASE;
// does not block for completion.
void ExynosCameraHWInterface2::StreamThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
4187
4188int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
4189{
4190    int index;
4191    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
4192        if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
4193            return index;
4194    }
4195    return -1;
4196}
4197
// Copy the caller's recording parameters into this thread's private
// record_parameters_t block (no signaling; takes effect on next use).
void ExynosCameraHWInterface2::StreamThread::setRecordingParameter(record_parameters_t * recordParm)
{
    memcpy(&m_recordParameters, recordParm, sizeof(record_parameters_t));
}
4202
4203int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
4204{
4205    if (ionClient == 0) {
4206        ionClient = ion_client_create();
4207        if (ionClient < 0) {
4208            ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
4209            return 0;
4210        }
4211    }
4212
4213    return ionClient;
4214}
4215
4216int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
4217{
4218    if (ionClient != 0) {
4219        if (ionClient > 0) {
4220            ion_client_destroy(ionClient);
4221        }
4222        ionClient = 0;
4223    }
4224
4225    return ionClient;
4226}
4227
// Allocate and mmap up to iMemoryNum ION planes for *buf, sized from
// buf->size.extS[]. Stops at the first zero-sized plane. On any
// allocation or mapping failure, every plane allocated so far is
// released via freeCameraMemory() and -1 is returned; returns 0 on
// success. Caller retains ownership of *buf and must eventually call
// freeCameraMemory().
int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
{
    int ret = 0;
    int i = 0;

    if (ionClient == 0) {
        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
        return -1;
    }

    for (i=0;i<iMemoryNum;i++) {
        // A zero size marks the end of the used planes.
        if (buf->size.extS[i] == 0) {
            break;
        }

        buf->fd.extFd[i] = ion_alloc(ionClient, \
                                      buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK,0);
        if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
            // Normalize the failed slot to the -1 sentinel before the
            // rollback so freeCameraMemory() skips it.
            buf->fd.extFd[i] = -1;
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }

        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
                                        buf->size.extS[i], 0);
        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
            // Normalize to MAP_FAILED so the rollback does not try to
            // unmap this plane (the fd itself is still freed).
            buf->virt.extP[i] = (char *)MAP_FAILED;
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }
        ALOGV("allocCameraMem : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
    }

    return ret;
}
4265
4266void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
4267{
4268
4269    int i =0 ;
4270    int ret = 0;
4271
4272    for (i=0;i<iMemoryNum;i++) {
4273        if (buf->fd.extFd[i] != -1) {
4274            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
4275                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
4276                if (ret < 0)
4277                    ALOGE("ERR(%s)", __FUNCTION__);
4278            }
4279            ion_free(buf->fd.extFd[i]);
4280        }
4281        buf->fd.extFd[i] = -1;
4282        buf->virt.extP[i] = (char *)MAP_FAILED;
4283        buf->size.extS[i] = 0;
4284    }
4285}
4286
4287void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
4288{
4289    int i =0 ;
4290    for (i=0;i<iMemoryNum;i++) {
4291        buf->virt.extP[i] = (char *)MAP_FAILED;
4292        buf->fd.extFd[i] = -1;
4293        buf->size.extS[i] = 0;
4294    }
4295}
4296
4297
4298
4299
// Singleton camera2 device instance shared across open/close calls; only
// one camera may be open at a time (see HAL2_camera_device_open).
static camera2_device_t *g_cam2_device = NULL;
// Set false during open/close transitions so in-flight stream calls bail
// out early. NOTE(review): "vaild" is a typo for "valid"; kept as-is
// because the name is referenced throughout this file.
static bool g_camera_vaild = false;
// Lazily-created per-camera static-info providers (index 0 = back,
// 1 = front); freed in HAL2_camera_device_close.
ExynosCamera2 * g_camera2[2] = { NULL, NULL };
4303
4304static int HAL2_camera_device_close(struct hw_device_t* device)
4305{
4306    ALOGV("%s: ENTER", __FUNCTION__);
4307    if (device) {
4308
4309        camera2_device_t *cam_device = (camera2_device_t *)device;
4310        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
4311        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
4312        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
4313        g_cam2_device = NULL;
4314        free(cam_device);
4315        g_camera_vaild = false;
4316    }
4317    if (g_camera2[0] != NULL) {
4318        delete static_cast<ExynosCamera2 *>(g_camera2[0]);
4319        g_camera2[0] = NULL;
4320    }
4321
4322    if (g_camera2[1] != NULL) {
4323        delete static_cast<ExynosCamera2 *>(g_camera2[1]);
4324        g_camera2[1] = NULL;
4325    }
4326
4327    ALOGV("%s: EXIT", __FUNCTION__);
4328    return 0;
4329}
4330
// Recover the C++ HAL object stashed in the C device struct's priv field.
static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
{
    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
}
4335
// C-to-C++ trampoline: forward to ExynosCameraHWInterface2::setRequestQueueSrcOps.
static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
            const camera2_request_queue_src_ops_t *request_src_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
}

// C-to-C++ trampoline: forward to notifyRequestQueueNotEmpty.
static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->notifyRequestQueueNotEmpty();
}

// C-to-C++ trampoline: forward to setFrameQueueDstOps.
static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
}

// C-to-C++ trampoline: forward to getInProgressCount.
static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getInProgressCount();
}

// C-to-C++ trampoline: forward to flushCapturesInProgress.
static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->flushCapturesInProgress();
}

// C-to-C++ trampoline: forward to constructDefaultRequest.
static int HAL2_device_construct_default_request(const struct camera2_device *dev,
            int request_template, camera_metadata_t **request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->constructDefaultRequest(request_template, request);
}
4374
// C-to-C++ trampoline: forward to allocateStream. The HAL fills in the
// output parameters (stream id, actual format, gralloc usage, max
// buffer count) for the framework.
static int HAL2_device_allocate_stream(
            const struct camera2_device *dev,
            // inputs
            uint32_t width,
            uint32_t height,
            int      format,
            const camera2_stream_ops_t *stream_ops,
            // outputs
            uint32_t *stream_id,
            uint32_t *format_actual,
            uint32_t *usage,
            uint32_t *max_buffers)
{
    ALOGV("(%s): ", __FUNCTION__);
    return obj(dev)->allocateStream(width, height, format, stream_ops,
                                    stream_id, format_actual, usage, max_buffers);
}


// C-to-C++ trampoline: forward to registerStreamBuffers.
static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
            uint32_t stream_id,
            int num_buffers,
            buffer_handle_t *buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
}
4402
// C-to-C++ trampoline: forward to releaseStream. Short-circuits to
// success while the device is being opened/closed (g_camera_vaild
// false) to avoid touching a half-torn-down HAL object.
static int HAL2_device_release_stream(
        const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->releaseStream(stream_id);
}

// C-to-C++ trampoline: forward to allocateReprocessStream.
static int HAL2_device_allocate_reprocess_stream(
           const struct camera2_device *dev,
            uint32_t width,
            uint32_t height,
            uint32_t format,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id,
            uint32_t *consumer_usage,
            uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
                                    stream_id, consumer_usage, max_buffers);
}

// C-to-C++ trampoline: forward to releaseReprocessStream.
static int HAL2_device_release_reprocess_stream(
        const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->releaseReprocessStream(stream_id);
}
4436
// C-to-C++ trampoline: forward to triggerAction (autofocus/precapture
// triggers and similar framework-initiated actions).
static int HAL2_device_trigger_action(const struct camera2_device *dev,
           uint32_t trigger_id,
            int ext1,
            int ext2)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
}

// C-to-C++ trampoline: forward to setNotifyCallback.
static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
            camera2_notify_callback notify_cb,
            void *user)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setNotifyCallback(notify_cb, user);
}

// C-to-C++ trampoline: forward to getMetadataVendorTagOps.
static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
            vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getMetadataVendorTagOps(ops);
}

// C-to-C++ trampoline: forward to dump (dumpsys support).
static int HAL2_device_dump(const struct camera2_device *dev, int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->dump(fd);
}
4466
4467
4468
4469
4470
// HAL module hook: this HAL always exposes exactly two cameras
// (0 = back, 1 = front; see HAL2_getCameraInfo).
static int HAL2_getNumberOfCameras()
{
    ALOGV("(%s): returning 2", __FUNCTION__);
    return 2;
}
4476
4477
// HAL module hook: report facing/orientation/API version and the static
// metadata for camera |cameraId| (0 = back, 1 = front). The metadata is
// built once per camera and cached for the life of the process.
static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
{
    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
    // Cached static characteristics, one slot per camera.
    static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};

    status_t res;

    if (cameraId == 0) {
        info->facing = CAMERA_FACING_BACK;
        if (!g_camera2[0])
            g_camera2[0] = new ExynosCamera2(0);
    }
    else if (cameraId == 1) {
        info->facing = CAMERA_FACING_FRONT;
        if (!g_camera2[1])
            g_camera2[1] = new ExynosCamera2(1);
    }
    else
        return BAD_VALUE;

    info->orientation = 0;
    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
    if (mCameraInfo[cameraId] == NULL) {
        // Two-pass build: the first call (sizeRequest=true) allocates the
        // metadata buffer, the second (false) fills in the entries.
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
        if (res != OK) {
            ALOGE("%s: Unable to allocate static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
        if (res != OK) {
            ALOGE("%s: Unable to fill in static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }
    info->static_camera_characteristics = mCameraInfo[cameraId];
    return NO_ERROR;
}
4517
// Binds each camera2 ops-table slot to its HAL2_device_* trampoline
// using a designated initializer.
#define SET_METHOD(m) m : HAL2_device_##m

// Function table handed to the framework for every opened device
// (g_cam2_device->ops).
static camera2_device_ops_t camera2_device_ops = {
        SET_METHOD(set_request_queue_src_ops),
        SET_METHOD(notify_request_queue_not_empty),
        SET_METHOD(set_frame_queue_dst_ops),
        SET_METHOD(get_in_progress_count),
        SET_METHOD(flush_captures_in_progress),
        SET_METHOD(construct_default_request),
        SET_METHOD(allocate_stream),
        SET_METHOD(register_stream_buffers),
        SET_METHOD(release_stream),
        SET_METHOD(allocate_reprocess_stream),
        SET_METHOD(release_reprocess_stream),
        SET_METHOD(trigger_action),
        SET_METHOD(set_notify_callback),
        SET_METHOD(get_metadata_vendor_tag_ops),
        SET_METHOD(dump),
};

#undef SET_METHOD
4539
4540
// HAL module hook: open camera |id| ("0" or "1"). Only one camera2
// device may exist at a time: reopening the same id returns the existing
// instance, while opening the other id busy-waits until the current one
// is closed (HAL2_camera_device_close clears g_cam2_device).
static int HAL2_camera_device_open(const struct hw_module_t* module,
                                  const char *id,
                                  struct hw_device_t** device)
{


    int cameraId = atoi(id);

    // Mark the device invalid while open is in flight so stream calls
    // against the old instance bail out (see HAL2_device_release_stream).
    g_camera_vaild = false;
    ALOGV("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
        return -EINVAL;
    }

    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
    if (g_cam2_device) {
        if (obj(g_cam2_device)->getCameraId() == cameraId) {
            ALOGV("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
            goto done;
        } else {

            // NOTE(review): busy-wait (10ms poll) for the other camera's
            // close to NULL out g_cam2_device; no timeout — confirm the
            // framework always closes before switching cameras.
            while (g_cam2_device)
                usleep(10000);
        }
    }

    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);

    if (!g_cam2_device)
        return -ENOMEM;

    g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
    g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
    g_cam2_device->common.close   = HAL2_camera_device_close;

    g_cam2_device->ops = &camera2_device_ops;

    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);

    // The C++ HAL object rides in priv; obj() recovers it in the
    // trampolines and device_close deletes it.
    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId]);

done:
    *device = (hw_device_t *)g_cam2_device;
    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
    g_camera_vaild = true;

    return 0;
}
4592
4593
// hw_module methods table: the framework only ever calls open().
static hw_module_methods_t camera_module_methods = {
            open : HAL2_camera_device_open
};

// Module descriptor the Android HAL loader looks up by the well-known
// HAL_MODULE_INFO_SYM symbol; must have C linkage.
extern "C" {
    struct camera_module HAL_MODULE_INFO_SYM = {
      common : {
          tag                : HARDWARE_MODULE_TAG,
          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
          hal_api_version    : HARDWARE_HAL_API_VERSION,
          id                 : CAMERA_HARDWARE_MODULE_ID,
          name               : "Exynos Camera HAL2",
          author             : "Samsung Corporation",
          methods            : &camera_module_methods,
          dso:                NULL,
          reserved:           {0},
      },
      get_number_of_cameras : HAL2_getNumberOfCameras,
      get_camera_info       : HAL2_getCameraInfo
    };
}
4615
4616}; // namespace android
4617