/*
**
** Copyright 2008, The Android Open Source Project
** Copyright 2012, Samsung Electronics Co. LTD
**
** Licensed under the Apache License, Version 2.0 (the "License");
** you may not use this file except in compliance with the License.
** You may obtain a copy of the License at
**
**     http://www.apache.org/licenses/LICENSE-2.0
**
** Unless required by applicable law or agreed to in writing, software
** distributed under the License is distributed on an "AS IS" BASIS,
** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
** See the License for the specific language governing permissions and
** limitations under the License.
*/

/*!
 * \file      ExynosCameraHWInterface2.cpp
 * \brief     source file for Android Camera API 2.0 HAL
 * \author    Sungjoong Kang(sj3.kang@samsung.com)
 * \date      2012/07/10
 *
 * <b>Revision History: </b>
 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
 *   Initial Release
 *
 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
 *   2nd Release
 *
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "ExynosCameraHAL2"
#include <utils/Log.h>

#include "ExynosCameraHWInterface2.h"
#include "exynos_format.h"



namespace android {


// temporarily copied from EmulatedFakeCamera2
// TODO : implement our own code
status_t constructDefaultRequestInternal(
        int request_template,
        camera_metadata_t **request,
        bool sizeRequest);

status_t constructStaticInfo(
        camera_metadata_t **info,
        int cameraId,
        bool sizeRequest);

bool isSupportedPreviewSize(int m_cameraId, int width, int height);
bool isSupportedJpegSize(int m_cameraId, int width, int height);
int getSccOutputSizeX(int cameraId);
int getSccOutputSizeY(int cameraId);
int getSensorOutputSizeX(int cameraId);
int getSensorOutputSizeY(int cameraId);
int getJpegOutputSizeX(int cameraId);
int getJpegOutputSizeY(int cameraId);

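// Debug helper: dumps 'size' bytes starting at 'buf' into the file 'fname',
// looping on write() until everything has been flushed. Intended for ad-hoc
// inspection of postview/capture buffers during bring-up.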
void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
{
    int nw;
    int cnt = 0;
    uint32_t written = 0;

    ALOGD("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
    int fd = open(fname, O_RDWR | O_CREAT, 0644);
    if (fd < 0) {
        ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
        return;
    }

    ALOGD("writing %d bytes to file [%s]", size, fname);
    while (written < size) {
        nw = ::write(fd, buf + written, size - written);
        if (nw < 0) {
            ALOGE("failed to write to file %d [%s]: %s", written, fname, strerror(errno));
            break;
        }
        written += nw;
        cnt++;
    }
    ALOGD("done writing %d bytes to file [%s] in %d passes", size, fname, cnt);
    ::close(fd);
}

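// Returns the bit depth (bits per pixel) for the given V4L2 pixel format,
// or 0 if the format is not handled here.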
int get_pixel_depth(uint32_t fmt)
{
    int depth = 0;

    switch (fmt) {
    case V4L2_PIX_FMT_JPEG:
        depth = 8;
        break;

    case V4L2_PIX_FMT_NV12:
    case V4L2_PIX_FMT_NV21:
    case V4L2_PIX_FMT_YUV420:
    case V4L2_PIX_FMT_YVU420M:
    case V4L2_PIX_FMT_NV12M:
    case V4L2_PIX_FMT_NV12MT:
        depth = 12;
        break;

    case V4L2_PIX_FMT_RGB565:
    case V4L2_PIX_FMT_YUYV:
    case V4L2_PIX_FMT_YVYU:
    case V4L2_PIX_FMT_UYVY:
    case V4L2_PIX_FMT_VYUY:
    case V4L2_PIX_FMT_NV16:
    case V4L2_PIX_FMT_NV61:
    case V4L2_PIX_FMT_YUV422P:
    case V4L2_PIX_FMT_SBGGR10:
    case V4L2_PIX_FMT_SBGGR12:
    case V4L2_PIX_FMT_SBGGR16:
        depth = 16;
        break;

    case V4L2_PIX_FMT_RGB32:
        depth = 32;
        break;
    default:
        ALOGE("Get depth failed(format : %d)", fmt);
        break;
    }

    return depth;
}

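// Thin wrappers around the exynos_v4l2_* ioctl helpers. Each one operates on a
// node_info_t (an opened video node plus its negotiated geometry, format, and
// buffer bookkeeping) and logs failures rather than aborting.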
int cam_int_s_fmt(node_info_t *node)
{
    struct v4l2_format v4l2_fmt;
    unsigned int framesize;
    int ret;

    memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));

    v4l2_fmt.type = node->type;
    framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;

    if (node->planes >= 1) {
        v4l2_fmt.fmt.pix_mp.width       = node->width;
        v4l2_fmt.fmt.pix_mp.height      = node->height;
        v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
        v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
    } else {
        ALOGE("%s:S_FMT, Out of bound : Number of element plane", __FUNCTION__);
    }

    /* Set up for capture */
    ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);

    if (ret < 0)
        ALOGE("%s: exynos_v4l2_s_fmt fail (%d)", __FUNCTION__, ret);

    return ret;
}

int cam_int_reqbufs(node_info_t *node)
{
    struct v4l2_requestbuffers req;
    int ret;

    memset(&req, 0, sizeof(req));
    req.count = node->buffers;
    req.type = node->type;
    req.memory = node->memory;

    ret = exynos_v4l2_reqbufs(node->fd, &req);

    if (ret < 0)
        ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)", __FUNCTION__, node->fd, ret);

    return req.count;
}

int cam_int_qbuf(node_info_t *node, int index)
{
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    int i;
    int ret = 0;

    // clear unused fields before handing the descriptor to the driver
    memset(&v4l2_buf, 0, sizeof(v4l2_buf));
    memset(planes, 0, sizeof(planes));
    v4l2_buf.m.planes   = planes;
    v4l2_buf.type       = node->type;
    v4l2_buf.memory     = node->memory;
    v4l2_buf.index      = index;
    v4l2_buf.length     = node->planes;

    for (i = 0; i < node->planes; i++) {
        v4l2_buf.m.planes[i].m.fd    = (int)(node->buffer[index].fd.extFd[i]);
        v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
    }

    ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);

    if (ret < 0)
        ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)", __FUNCTION__, index, ret);

    return ret;
}

int cam_int_streamon(node_info_t *node)
{
    enum v4l2_buf_type type = node->type;
    int ret;

    ret = exynos_v4l2_streamon(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMON failed (%d)", __FUNCTION__, ret);

    ALOGV("On streaming I/O... ... fd(%d)", node->fd);

    return ret;
}

int cam_int_streamoff(node_info_t *node)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    int ret;

    ALOGV("Off streaming I/O... fd(%d)", node->fd);
    ret = exynos_v4l2_streamoff(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)", __FUNCTION__, ret);

    return ret;
}

int isp_int_streamoff(node_info_t *node)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    int ret;

    ALOGV("Off streaming I/O... fd(%d)", node->fd);
    ret = exynos_v4l2_streamoff(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)", __FUNCTION__, ret);

    return ret;
}

int cam_int_dqbuf(node_info_t *node)
{
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];
    int ret;

    // clear unused fields before handing the descriptor to the driver
    memset(&v4l2_buf, 0, sizeof(v4l2_buf));
    memset(planes, 0, sizeof(planes));
    v4l2_buf.type       = node->type;
    v4l2_buf.memory     = node->memory;
    v4l2_buf.m.planes   = planes;
    v4l2_buf.length     = node->planes;

    ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
    if (ret < 0)
        ALOGE("%s: VIDIOC_DQBUF failed (%d)", __FUNCTION__, ret);

    return v4l2_buf.index;
}

int cam_int_s_input(node_info_t *node, int index)
{
    int ret;

    ret = exynos_v4l2_s_input(node->fd, index);
    if (ret < 0)
        ALOGE("%s: VIDIOC_S_INPUT failed (%d)", __FUNCTION__, ret);

    return ret;
}


gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;

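// RequestManager keeps capture requests in a fixed-size circular buffer of
// request_manager_entry slots and tracks three cursors through it: where new
// requests are inserted, which entry is currently being processed, and which
// entry is next to be emitted as an output frame.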
RequestManager::RequestManager(SignalDrivenThread* main_thread):
    m_numOfEntries(0),
    m_entryInsertionIndex(0),
    m_entryProcessingIndex(0),
    m_entryFrameOutputIndex(0)
{
    m_metadataConverter = new MetadataConverter;
    m_mainThread = main_thread;
    for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
        memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
        entries[i].internal_shot.ctl.request.frameCount = -1;
    }
    tempInitialSkipCnt = 8;
    return;
}

RequestManager::~RequestManager()
{
    return;
}

int RequestManager::GetNumEntries()
{
    return m_numOfEntries;
}

void RequestManager::SetDefaultParameters(int cropX)
{
    m_cropX = cropX;
}

bool RequestManager::IsRequestQueueFull()
{
    Mutex::Autolock lock(m_requestMutex);
    if (m_numOfEntries >= NUM_MAX_REQUEST_MGR_ENTRY)
        return true;
    else
        return false;
}

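// Takes ownership of a request handed down by the framework: converts its
// metadata into an internal shot structure and stores it in the next free
// circular-buffer slot.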
void RequestManager::RegisterRequest(camera_metadata_t * new_request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);

    Mutex::Autolock lock(m_requestMutex);

    request_manager_entry * newEntry = NULL;
    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__, newInsertionIndex, m_numOfEntries);


    newEntry = &(entries[newInsertionIndex]);

    if (newEntry->status != EMPTY) {
        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
        return;
    }
    newEntry->status = REGISTERED;
    newEntry->original_request = new_request;
    // TODO : allocate internal_request dynamically
    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
    newEntry->output_stream_count = newEntry->internal_shot.ctl.request.numOutputStream;

    m_numOfEntries++;
    m_entryInsertionIndex = newInsertionIndex;


    // Dump();
    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
        m_numOfEntries, m_entryInsertionIndex, m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.ctl.request.frameCount);
}

void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    Mutex::Autolock lock(m_requestMutex);

    request_manager_entry * currentEntry = &(entries[m_entryFrameOutputIndex]);

    if (currentEntry->status != PROCESSING) {
        ALOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
            m_entryProcessingIndex, m_entryFrameOutputIndex, (int)(currentEntry->status));
        return;
    }
    if (deregistered_request)  *deregistered_request = currentEntry->original_request;

    currentEntry->status = EMPTY;
    currentEntry->original_request = NULL;
    memset(&(currentEntry->internal_shot), 0, sizeof(camera2_ctl_metadata_NEW_t));
    currentEntry->internal_shot.ctl.request.frameCount = -1;
    currentEntry->output_stream_count = 0;
    currentEntry->dynamic_meta_vaild = false;
    m_numOfEntries--;
    // Dump();
    ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
        m_numOfEntries, m_entryInsertionIndex, m_entryProcessingIndex, m_entryFrameOutputIndex);

    return;
}

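// Builds the output metadata frame for the entry that is next in line for
// delivery to the framework, skipping ahead to the most recently completed
// entry if an earlier one was left behind.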
bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
                camera_metadata_t ** prepared_frame)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    Mutex::Autolock lock(m_requestMutex);
    status_t res = NO_ERROR;
    int tempFrameOutputIndex = GetNextIndex(m_entryFrameOutputIndex);
    request_manager_entry * currentEntry = &(entries[tempFrameOutputIndex]);
    ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
        m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);

    if (m_completedIndex != tempFrameOutputIndex) {
        ALOGV("DEBUG(%s): frame left behind : completed(%d), preparing(%d)", __FUNCTION__, m_completedIndex, tempFrameOutputIndex);

        request_manager_entry * currentEntry2 = &(entries[tempFrameOutputIndex]);
        currentEntry2->status = EMPTY;
        currentEntry2->original_request = NULL;
        memset(&(currentEntry2->internal_shot), 0, sizeof(camera2_ctl_metadata_NEW_t));
        currentEntry2->internal_shot.ctl.request.frameCount = -1;
        currentEntry2->output_stream_count = 0;
        currentEntry2->dynamic_meta_vaild = false;
        m_numOfEntries--;
        // Dump();
        tempFrameOutputIndex = m_completedIndex;
        currentEntry = &(entries[tempFrameOutputIndex]);
    }

    if (currentEntry->output_stream_count != 0) {
        ALOGD("DBG(%s): Circular buffer has remaining output : stream_count(%d)", __FUNCTION__, currentEntry->output_stream_count);
        return false;
    }

    if (currentEntry->status != PROCESSING) {
        ALOGD("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));

        return false;
    }
    m_entryFrameOutputIndex = tempFrameOutputIndex;
    m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 10, 500); // estimated
    res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
                m_tempFrameMetadata);
    if (res != NO_ERROR) {
        ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
        return false;
    }
    *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
    *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
    *prepared_frame = m_tempFrameMetadata;
    ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d)", m_entryFrameOutputIndex,
        currentEntry->internal_shot.ctl.request.frameCount);
    // Dump();
    return true;
}

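// Advances the processing cursor to the next registered entry and fills the
// per-frame shot_ext header (placed in the second plane of the supplied buffer)
// that travels with the buffer through the sensor/ISP pipeline. Returns the new
// processing index, or -1 if there is nothing to process.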
int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    Mutex::Autolock lock(m_requestMutex);
    struct camera2_shot_ext * shot_ext;
    int targetStreamIndex = 0;

    if (m_numOfEntries == 0) {
        ALOGV("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
        return -1;
    }

    if ((m_entryProcessingIndex == m_entryInsertionIndex)
        && (entries[m_entryProcessingIndex].status == PROCESSING)) {
        ALOGV("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
            m_numOfEntries, m_entryInsertionIndex, m_entryProcessingIndex, m_entryFrameOutputIndex);
        return -1;
    }

    request_manager_entry * newEntry = NULL;
    int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);

    newEntry = &(entries[newProcessingIndex]);

    if (newEntry->status != REGISTERED) {
        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
    //  Dump();
        return -1;
    }
    newEntry->status = PROCESSING;
    // TODO : replace the code below with a single memcpy of pre-converted 'shot'

    shot_ext = (struct camera2_shot_ext *)(buf->virt.extP[1]);
    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));

    shot_ext->request_sensor = 1;
    shot_ext->dis_bypass = 1;
    shot_ext->dnr_bypass = 1;
    for (int i = 0; i < newEntry->output_stream_count; i++) {
        // TODO : match with actual stream index;
        targetStreamIndex = newEntry->internal_shot.ctl.request.outputStreams[i];

        if (targetStreamIndex == 0) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
            shot_ext->request_scp = 1;
            shot_ext->shot.ctl.request.outputStreams[0] = 1;
        }
        else if (targetStreamIndex == 1) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
            shot_ext->request_scc = 1;
            shot_ext->shot.ctl.request.outputStreams[1] = 1;
        }
        else if (targetStreamIndex == 2) {
            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (record)", __FUNCTION__, i);
            shot_ext->request_scp = 1;
            shot_ext->shot.ctl.request.outputStreams[2] = 1;
        }
        else {
            ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
        }
    }
    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
    shot_ext->shot.magicNumber = 0x23456789;
    shot_ext->shot.ctl.sensor.exposureTime = 0;
    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
    shot_ext->shot.ctl.sensor.sensitivity = 0;

    shot_ext->shot.ctl.scaler.cropRegion[0] = 0;
    shot_ext->shot.ctl.scaler.cropRegion[1] = 0;
    shot_ext->shot.ctl.scaler.cropRegion[2] = m_cropX;

    m_entryProcessingIndex = newProcessingIndex;

    //    Dump();
    ALOGV("## MarkProcReq DONE totalentry(%d), insert(%d), processing(%d), frame(%d) frameCnt(%d)",
        m_numOfEntries, m_entryInsertionIndex, m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.ctl.request.frameCount);

    return m_entryProcessingIndex;
}

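// Called when one of the output streams has consumed its buffer for the given
// frame count; decrements the entry's outstanding stream count and checks
// whether the whole request is now complete.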
void RequestManager::NotifyStreamOutput(int frameCnt, int stream_id)
{
    int index;

    ALOGV("DEBUG(%s): frameCnt(%d), stream_id(%d)", __FUNCTION__, frameCnt, stream_id);

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }
    ALOGV("DEBUG(%s): frameCnt(%d), stream_id(%d) last cnt (%d)", __FUNCTION__, frameCnt, stream_id, entries[index].output_stream_count);

    entries[index].output_stream_count--;  // TODO : match stream id also
    CheckCompleted(index);

    return;
}

void RequestManager::CheckCompleted(int index)
{
    ALOGV("DEBUG(%s): reqIndex(%d) current Count(%d)", __FUNCTION__, index, entries[index].output_stream_count);
    if (entries[index].output_stream_count == 0 && entries[index].dynamic_meta_vaild) {
        ALOGV("DEBUG(%s): index[%d] completed and sending SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__, index);
//        Dump();
        m_completedIndex = index;
        m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
    }
    return;
}

void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext, int frameCnt)
{
    int index;

    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * newEntry = &(entries[index]);

    newEntry->dynamic_meta_vaild = true;

    // TODO : move some code of PrepareFrame here

    CheckCompleted(index);
}

void RequestManager::DumpInfoWithIndex(int index)
{
    camera2_ctl_metadata_NEW_t * currMetadata = &(entries[index].internal_shot);

    ALOGV("####   frameCount(%d) exposureTime(%lld) ISO(%d)",
        currMetadata->ctl.request.frameCount,
        currMetadata->ctl.sensor.exposureTime,
        currMetadata->ctl.sensor.sensitivity);
    if (currMetadata->ctl.request.numOutputStream == 0)
        ALOGV("####   No output stream selected");
    else if (currMetadata->ctl.request.numOutputStream == 1)
        ALOGV("####   OutputStreamId : %d", currMetadata->ctl.request.outputStreams[0]);
    else if (currMetadata->ctl.request.numOutputStream == 2)
        ALOGV("####   OutputStreamId : %d, %d", currMetadata->ctl.request.outputStreams[0],
            currMetadata->ctl.request.outputStreams[1]);
    else
        ALOGV("####   OutputStream num (%d) abnormal ", currMetadata->ctl.request.numOutputStream);
}

void    RequestManager::UpdateOutputStreamInfo(struct camera2_shot_ext *shot_ext, int frameCnt)
{
    int index, targetStreamIndex;

    ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
    if (frameCnt < 0)
        return;

    index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * newEntry = &(entries[index]);
    shot_ext->request_sensor = 1;
    shot_ext->request_scc = 0;
    shot_ext->request_scp = 0;
    shot_ext->shot.ctl.request.outputStreams[0] = 0;
    shot_ext->shot.ctl.request.outputStreams[1] = 0;
    shot_ext->shot.ctl.request.outputStreams[2] = 0;

    for (int i = 0; i < newEntry->output_stream_count; i++) {
        // TODO : match with actual stream index;
        targetStreamIndex = newEntry->internal_shot.ctl.request.outputStreams[i];

        if (targetStreamIndex == 0) {
            ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerP", __FUNCTION__, i);
            shot_ext->request_scp = 1;
            shot_ext->shot.ctl.request.outputStreams[0] = 1;
        }
        else if (targetStreamIndex == 1) {
            ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerC", __FUNCTION__, i);
            shot_ext->request_scc = 1;
            shot_ext->shot.ctl.request.outputStreams[1] = 1;
        }
        else if (targetStreamIndex == 2) {
            ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerP (record)", __FUNCTION__, i);
            shot_ext->request_scp = 1;
            shot_ext->shot.ctl.request.outputStreams[2] = 1;
        }
        else {
            ALOGV("DEBUG(%s): outputstreams item[%d] has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
        }
    }
}

int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
{
    for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        if (entries[i].internal_shot.ctl.request.frameCount == frameCnt)
            return i;
    }
    return -1;
}

void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    currentEntry->internal_shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
    ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
        index, frameCnt, currentEntry->internal_shot.dm.sensor.timeStamp);
}

uint64_t  RequestManager::GetTimestamp(int frameCnt)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return 0;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    uint64_t frameTime = currentEntry->internal_shot.dm.sensor.timeStamp;
    ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
    return frameTime;
}

int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
{
    int tempIndex;
    // HACK
    if (tempInitialSkipCnt != 0) {
        tempInitialSkipCnt--;
        return -1;
    }
/*
 *  tempIndex = GetNextIndex(tempIndex);
 *  return entries[tempIndex].internal_shot.ctl.request.frameCount;
 */
    tempIndex = GetNextIndex(m_entryFrameOutputIndex);
    return entries[tempIndex].internal_shot.ctl.request.frameCount;
}

void RequestManager::Dump(void)
{
    int i = 0;
    request_manager_entry * currentEntry;
    ALOGV("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
        m_numOfEntries, m_entryInsertionIndex, m_entryProcessingIndex, m_entryFrameOutputIndex);

    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        currentEntry = &(entries[i]);
        ALOGV("[%2d] status[%d] frameCnt[%3d] numOutput[%d]", i,
            currentEntry->status, currentEntry->internal_shot.ctl.request.frameCount,
            currentEntry->output_stream_count);
    }
}

int     RequestManager::GetNextIndex(int index)
{
    index++;
    if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
        index = 0;

    return index;
}

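// Camera HAL device implementation. The constructor loads the gralloc module,
// creates the ION client used for buffer handling, spins up the main, sensor,
// and ISP worker threads, and initializes the two CSC (color space conversion)
// handles used for picture and preview/video scaling.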
ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev):
            m_requestQueueOps(NULL),
            m_frameQueueOps(NULL),
            m_callbackCookie(NULL),
            m_numOfRemainingReqInSvc(0),
            m_isRequestQueuePending(false),
            m_isRequestQueueNull(true),
            m_isSensorThreadOn(false),
            m_isSensorStarted(false),
            m_ionCameraClient(0),
            m_initFlag1(false),
            m_initFlag2(false),
            m_scp_flushing(false),
            m_closing(false),
            m_lastTimeStamp(0),
            m_recordingEnabled(false),
            m_needsRecordBufferInit(false),
            lastFrameCnt(-1),
            m_scp_closing(false),
            m_scp_closed(false),
            m_savecnt(0),
            m_halDevice(dev),
            m_cameraId(0)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    int ret = 0;

    m_exynosPictureCSC = NULL;
    m_exynosVideoCSC = NULL;

    if (!m_grallocHal) {
        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
        if (ret)
            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
    }

    m_ionCameraClient = createIonClient(m_ionCameraClient);
    if (m_ionCameraClient == 0)
        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);

    m_cameraId = cameraId;

    m_BayerManager = new BayerBufManager();
    m_mainThread    = new MainThread(this);
    m_sensorThread  = new SensorThread(this);
    m_ispThread     = new IspThread(this);
    m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
    ALOGV("DEBUG(%s): created sensorthread ################", __FUNCTION__);
    usleep(1600000);

    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
    CSC_METHOD cscMethod = CSC_METHOD_HW;
    m_exynosPictureCSC = csc_init(cscMethod);
    if (m_exynosPictureCSC == NULL)
        ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
    csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);

    m_exynosVideoCSC = csc_init(cscMethod);
    if (m_exynosVideoCSC == NULL)
        ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
    csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, PREVIEW_GSC_NODE_NUM);

    ALOGV("DEBUG(%s): END", __FUNCTION__);
}

ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
{
    ALOGD("%s: ENTER", __FUNCTION__);
    this->release();
    ALOGD("%s: EXIT", __FUNCTION__);
}

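// Tears the device down in stages: signals all worker threads to stop, waits
// for them to terminate, releases the CSC handles, frees the sensor/capture
// buffers, and finally closes every V4L2 node and the ION client.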
void ExynosCameraHWInterface2::release()
{
    int i, res;
    ALOGD("%s: ENTER", __FUNCTION__);
    m_closing = true;

    while (!m_scp_closed)
        usleep(1000);
    if (m_ispThread != NULL) {
        m_ispThread->release();
    }

    if (m_sensorThread != NULL) {
        m_sensorThread->release();
    }

    if (m_mainThread != NULL) {
        m_mainThread->release();
    }

    if (m_streamThreads[0] != NULL) {
        m_streamThreads[0]->release();
        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_streamThreads[1] != NULL) {
        m_streamThreads[1]->release();
        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }


    if (m_exynosPictureCSC)
        csc_deinit(m_exynosPictureCSC);
    m_exynosPictureCSC = NULL;

    if (m_exynosVideoCSC)
        csc_deinit(m_exynosVideoCSC);
    m_exynosVideoCSC = NULL;

    if (m_ispThread != NULL) {
        while (!m_ispThread->IsTerminated())
            usleep(1000);
        m_ispThread = NULL;
    }

    if (m_sensorThread != NULL) {
        while (!m_sensorThread->IsTerminated())
            usleep(1000);
        m_sensorThread = NULL;
    }

    if (m_mainThread != NULL) {
        while (!m_mainThread->IsTerminated())
            usleep(1000);
        m_mainThread = NULL;
    }

    if (m_streamThreads[0] != NULL) {
        while (!m_streamThreads[0]->IsTerminated())
            usleep(1000);
        m_streamThreads[0] = NULL;
    }

    if (m_streamThreads[1] != NULL) {
        while (!m_streamThreads[1]->IsTerminated())
            usleep(1000);
        m_streamThreads[1] = NULL;
    }

    for (i = 0; i < m_camera_info.sensor.buffers; i++)
        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);

    for (i = 0; i < m_camera_info.capture.buffers; i++)
        freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.sensor.fd);
    if (res != NO_ERROR) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)", __FUNCTION__, res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.isp.fd);
    if (res != NO_ERROR) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)", __FUNCTION__, res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.capture.fd);
    if (res != NO_ERROR) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)", __FUNCTION__, res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
    res = exynos_v4l2_close(m_fd_scp);
    if (res != NO_ERROR) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)", __FUNCTION__, res);
    }
    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
    deleteIonClient(m_ionCameraClient);

    ALOGD("%s: EXIT", __FUNCTION__);
}

int ExynosCameraHWInterface2::getCameraId() const
{
    return m_cameraId;
}

int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
            && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
        m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
        return 0;
    }
    else {
        ALOGE("ERR(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
        return 1;
    }
}

int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
{
    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY]", __FUNCTION__);
    if ((NULL == m_frameQueueOps) || (NULL == m_requestQueueOps)) {
        ALOGE("ERR(%s):queue ops NULL. ignoring request", __FUNCTION__);
        return 0;
    }
    m_isRequestQueueNull = false;
    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
    return 0;
}

int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
            && (NULL != frame_dst_ops->cancel_frame) && (NULL != frame_dst_ops->enqueue_frame)) {
        m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
        return 0;
    }
    else {
        ALOGE("ERR(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
        return 1;
    }
}

int ExynosCameraHWInterface2::getInProgressCount()
{
    int inProgressCount = m_requestManager->GetNumEntries();
    ALOGV("DEBUG(%s): # of dequeued req (%d)", __FUNCTION__, inProgressCount);
    return inProgressCount;
}

int ExynosCameraHWInterface2::flushCapturesInProgress()
{
    return 0;
}

int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
{
    ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);

    if (request == NULL) return BAD_VALUE;
    if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
        return BAD_VALUE;
    }
    status_t res;
    // Pass 1, calculate size and allocate
    res = constructDefaultRequestInternal(request_template,
            request,
            true);
    if (res != OK) {
        return res;
    }
    // Pass 2, build request
    res = constructDefaultRequestInternal(request_template,
            request,
            false);
    if (res != OK) {
        ALOGE("Unable to populate new request for template %d",
                request_template);
    }

    return res;
}

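// Stream allocation policy in this HAL revision: an opaque-format request at a
// supported preview size becomes stream 0 (preview via the SCP scaler node) or
// stream 2 (recording, reusing the preview stream thread), while a BLOB-format
// request at a supported JPEG size becomes stream 1 (capture, with the node
// sized from getSccOutputSizeX/Y).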
int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
                                    uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
{
    ALOGD("DEBUG(%s): allocate stream width(%d) height(%d) format(%x)", __FUNCTION__, width, height, format);
    char node_name[30];
    int fd = 0, allocCase = 0;
    StreamThread *AllocatedStream;
    stream_parameters_t newParameters;

    if ((format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE &&
        isSupportedPreviewSize(m_cameraId, width, height))) {
        if (!(m_streamThreads[0].get())) {
            ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
            allocCase = 0;
        }
        else {
            if ((m_streamThreads[0].get())->m_activated == TRUE) {
                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
                allocCase = 1;
            }
            else {
                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
                allocCase = 2;
            }
        }
        if (allocCase == 0 || allocCase == 2) {
            *stream_id = 0;

            if (allocCase == 0) {
                m_streamThreads[0]  = new StreamThread(this, *stream_id);


                memset(node_name, 0x00, sizeof(node_name));
                snprintf(node_name, sizeof(node_name), "%s%d", NODE_PREFIX, 44);
                fd = exynos_v4l2_open(node_name, O_RDWR, 0);
                if (fd < 0) {
                    ALOGE("ERR(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__, node_name, fd);
                }
                else {
                    ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__, node_name, fd);
                }
                m_fd_scp = fd;
            }
            AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
            usleep(100000); // TODO : guarantee the code below runs after readyToRunInternal()

            *format_actual = HAL_PIXEL_FORMAT_YV12;
            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_YUV_ADDR;
            *max_buffers = 8;

            newParameters.streamType    = 0;
            newParameters.outputWidth   = width;
            newParameters.outputHeight  = height;
            newParameters.nodeWidth     = width;
            newParameters.nodeHeight    = height;
            newParameters.outputFormat  = *format_actual;
            newParameters.nodeFormat    = HAL_PIXEL_FORMAT_2_V4L2_PIX(*format_actual);
            newParameters.streamOps     = stream_ops;
            newParameters.usage         = *usage;
            newParameters.numHwBuffers  = *max_buffers;
            newParameters.fd            = m_fd_scp;
            newParameters.nodePlanes    = 3;
            newParameters.svcPlanes     = 3;
            newParameters.halBuftype    = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
            newParameters.memory        = V4L2_MEMORY_DMABUF;
            newParameters.ionClient     = m_ionCameraClient;
            AllocatedStream->m_index = *stream_id;
            AllocatedStream->setParameter(&newParameters);
            AllocatedStream->m_activated = true;

            m_scp_flushing = false;
            m_scp_closing = false;
            m_scp_closed = false;
            m_requestManager->SetDefaultParameters(width);
            m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = width;
            return 0;
        }
        else if (allocCase == 1) {
            record_parameters_t recordParameters;
            StreamThread *parentStream;
            parentStream = (StreamThread*)(m_streamThreads[0].get());
            if (!parentStream) {
                return 1;
                // TODO
            }
            *stream_id = 2;
            usleep(100000); // TODO : guarantee the code below runs after readyToRunInternal()

            *format_actual = HAL_PIXEL_FORMAT_RGBA_8888;
            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_YUV_ADDR;
            *max_buffers = 10;

            recordParameters.outputWidth   = width;
            recordParameters.outputHeight  = height;
            recordParameters.outputFormat  = *format_actual;
            recordParameters.svcPlanes     = 1;
            recordParameters.streamOps     = stream_ops;
            recordParameters.usage         = *usage;
            recordParameters.numBufsInHal  = 0;

            parentStream->setRecordingParameter(&recordParameters);
            m_scp_flushing = false;
            m_scp_closing = false;
            m_scp_closed = false;
            m_recordingEnabled = true;
            return 0;
        }
    }
    else if (format == HAL_PIXEL_FORMAT_BLOB
            && isSupportedJpegSize(m_cameraId, width, height)) {

        *stream_id = 1;

        m_streamThreads[1]  = new StreamThread(this, *stream_id);
        AllocatedStream = (StreamThread*)(m_streamThreads[1].get());

        fd = m_camera_info.capture.fd;
        usleep(100000); // TODO : guarantee the code below runs after readyToRunInternal()

        *format_actual = HAL_PIXEL_FORMAT_BLOB;

        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
        *max_buffers = 8;

        newParameters.streamType    = 1;
        newParameters.outputWidth   = width;
        newParameters.outputHeight  = height;

        newParameters.nodeWidth     = getSccOutputSizeX(m_cameraId);
        newParameters.nodeHeight    = getSccOutputSizeY(m_cameraId);

        newParameters.outputFormat  = *format_actual;
        newParameters.nodeFormat    = V4L2_PIX_FMT_YUYV;
        newParameters.streamOps     = stream_ops;
        newParameters.usage         = *usage;
        newParameters.numHwBuffers  = *max_buffers;
        newParameters.fd            = fd;
        newParameters.nodePlanes    = 1;
        newParameters.svcPlanes     = 1;
        newParameters.halBuftype    = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
        newParameters.memory        = V4L2_MEMORY_DMABUF;
        newParameters.ionClient     = m_ionCameraClient;
        AllocatedStream->m_index = *stream_id;
        AllocatedStream->setParameter(&newParameters);
        return 0;
    }
    ALOGE("ERR(%s): Unsupported Pixel Format", __FUNCTION__);
    return 1; // TODO : check proper error code
}

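// Registers the gralloc buffers the framework allocated for a stream. For
// stream 0 the buffers are also queued straight into the SCP V4L2 node; for
// stream 1 the previously allocated capture buffers are reused; for stream 2
// only the gralloc mappings are recorded for the recording path.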
int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
        int num_buffers, buffer_handle_t *registeringBuffers)
{
    int                     i,j;
    void                    *virtAddr[3];
    uint32_t                plane_index = 0;
    stream_parameters_t     *targetStreamParms;
    record_parameters_t     *targetRecordParms;
    node_info_t             *currentNode;

    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane  planes[VIDEO_MAX_PLANES];

    ALOGV("DEBUG(%s): streamID (%d), num_buff(%d), handle(%x) ", __FUNCTION__,
        stream_id, num_buffers, (uint32_t)registeringBuffers);

    if (stream_id == 0) {
        targetStreamParms = &(m_streamThreads[0]->m_parameters);
    }
    else if (stream_id == 1) {
        targetStreamParms = &(m_streamThreads[1]->m_parameters);
    }
    else if (stream_id == 2) {
        targetRecordParms = &(m_streamThreads[0]->m_recordParameters);

        targetRecordParms->numSvcBuffers = num_buffers;

        for (i = 0 ; i < targetRecordParms->numSvcBuffers ; i++) {
            ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
                i, (uint32_t)(registeringBuffers[i]));
            if (m_grallocHal) {
                if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
                       targetRecordParms->usage, 0, 0,
                       targetRecordParms->outputWidth, targetRecordParms->outputHeight, virtAddr) != 0) {
                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
                }
                else {
                    ExynosBuffer currentBuf;
                    const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
                    m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(targetRecordParms->outputFormat),
                        targetRecordParms->outputWidth, targetRecordParms->outputHeight, &currentBuf);
                    currentBuf.fd.extFd[0] = priv_handle->fd;
                    currentBuf.fd.extFd[1] = priv_handle->u_fd;
                    currentBuf.fd.extFd[2] = priv_handle->v_fd;
                    ALOGV("DEBUG(%s):  yaddr(%x), uoffset(%d), voffset(%d)", __FUNCTION__, priv_handle->yaddr, priv_handle->uoffset, priv_handle->voffset);
                    ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
                    for (plane_index = 0 ; plane_index < targetRecordParms->svcPlanes ; plane_index++) {
                        currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
                        ALOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x)",
                             __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
                             (unsigned int)currentBuf.virt.extP[plane_index]);
                    }
                    targetRecordParms->svcBufStatus[i]  = ON_SERVICE;
                    targetRecordParms->svcBuffers[i]    = currentBuf;
                    targetRecordParms->svcBufHandle[i]  = registeringBuffers[i];
                }
            }
        }
        m_needsRecordBufferInit = true;
        return 0;
    }
    else {
        ALOGE("ERR(%s) unregistered stream id (%d)", __FUNCTION__, stream_id);
        return 1; // TODO : proper error code?
    }

    if (targetStreamParms->streamType == 0) {
        if (num_buffers < targetStreamParms->numHwBuffers) {
            ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
                __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
            return 1; // TODO : proper error code?
        }
    }
    ALOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)",
            __FUNCTION__, targetStreamParms->outputFormat, targetStreamParms->outputWidth,
            targetStreamParms->outputHeight, targetStreamParms->svcPlanes);

    targetStreamParms->numSvcBuffers = num_buffers;
    currentNode = &(targetStreamParms->node); // TO Remove

    currentNode->fd         = targetStreamParms->fd;
    currentNode->width      = targetStreamParms->nodeWidth;
    currentNode->height     = targetStreamParms->nodeHeight;
    currentNode->format     = targetStreamParms->nodeFormat;
    currentNode->planes     = targetStreamParms->nodePlanes;
    currentNode->buffers    = targetStreamParms->numHwBuffers;
    currentNode->type       = targetStreamParms->halBuftype;
    currentNode->memory     = targetStreamParms->memory;
    currentNode->ionClient  = targetStreamParms->ionClient;

    if (targetStreamParms->streamType == 0) {
        cam_int_s_input(currentNode, m_camera_info.sensor_id);
        cam_int_s_fmt(currentNode);
        cam_int_reqbufs(currentNode);
    }
    else if (targetStreamParms->streamType == 1) {
        for (i = 0; i < currentNode->buffers; i++) {
            memcpy(&(currentNode->buffer[i]), &(m_camera_info.capture.buffer[i]), sizeof(ExynosBuffer));
        }
    }

    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
        ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
            i, (uint32_t)(registeringBuffers[i]));
        if (m_grallocHal) {
            if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
                   targetStreamParms->usage, 0, 0,
                   currentNode->width, currentNode->height, virtAddr) != 0) {
                ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
            }
            else {
                // clear unused fields before handing the descriptor to the driver
                memset(&v4l2_buf, 0, sizeof(v4l2_buf));
                memset(planes, 0, sizeof(planes));
                v4l2_buf.m.planes   = planes;
                v4l2_buf.type       = currentNode->type;
                v4l2_buf.memory     = currentNode->memory;
                v4l2_buf.index      = i;
                v4l2_buf.length     = currentNode->planes;

                ExynosBuffer currentBuf;
                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);

                m_getAlignedYUVSize(currentNode->format,
                    currentNode->width, currentNode->height, &currentBuf);

                v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
                v4l2_buf.m.planes[2].m.fd = priv_handle->u_fd;
                v4l2_buf.m.planes[1].m.fd = priv_handle->v_fd;
                currentBuf.fd.extFd[0] = priv_handle->fd;
                currentBuf.fd.extFd[2] = priv_handle->u_fd;
                currentBuf.fd.extFd[1] = priv_handle->v_fd;
                ALOGV("DEBUG(%s):  yaddr(%x), uoffset(%d), voffset(%d)", __FUNCTION__, priv_handle->yaddr, priv_handle->uoffset, priv_handle->voffset);
                ALOGV("DEBUG(%s):  ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);


                for (plane_index = 0 ; plane_index < v4l2_buf.length ; plane_index++) {
                    currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
                    v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
                    ALOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x), length(%d)",
                         __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
                         (unsigned int)currentBuf.virt.extP[plane_index],
                         v4l2_buf.m.planes[plane_index].length);
                }

                if (targetStreamParms->streamType == 0) {
                    if (i < currentNode->buffers) {
                        if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
                            ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
                                __FUNCTION__, stream_id, currentNode->fd);
                            //return false;
                        }
                        ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
                                __FUNCTION__, stream_id, currentNode->fd);
                        targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
                    }
                    else {
                        targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
                    }
                }
                else if (targetStreamParms->streamType == 1) {
                    targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
                }
                targetStreamParms->svcBuffers[i]    = currentBuf;
                targetStreamParms->svcBufHandle[i]  = registeringBuffers[i];
            }
        }
    }
    ALOGV("DEBUG(%s): calling  streamon", __FUNCTION__);
    cam_int_streamon(&(targetStreamParms->node));
    ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
    ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
    return 0;
}

int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
{
    StreamThread *targetStream;
    ALOGV("DEBUG(%s):", __FUNCTION__);

    if (stream_id == 0) {
        targetStream = (StreamThread*)(m_streamThreads[0].get());
        m_scp_flushing = true;
    }
    else if (stream_id == 1) {
        targetStream = (StreamThread*)(m_streamThreads[1].get());
    }
    else {
        ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
        return 1; // TODO : proper error code?
    }

    targetStream->release();
    targetStream->m_activated = false;
    ALOGV("DEBUG(%s): DONE", __FUNCTION__);
    return 0;
}

int ExynosCameraHWInterface2::allocateReprocessStream(
    uint32_t width, uint32_t height, uint32_t format,
    const camera2_stream_in_ops_t *reprocess_stream_ops,
    uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}

int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}

int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}

int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    m_notifyCb = notify_cb;
    m_callbackCookie = user;
    return 0;
}

int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}

int ExynosCameraHWInterface2::dump(int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}

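// Computes the per-plane sizes (with the hardware alignment each format
// requires) of a frame in the given V4L2 color format and stores them in
// buf->size.extS[].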
void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
{
    switch (colorFormat) {
    // 1 plane
    case V4L2_PIX_FMT_RGB565 :
    case V4L2_PIX_FMT_YUYV :
    case V4L2_PIX_FMT_UYVY :
    case V4L2_PIX_FMT_VYUY :
    case V4L2_PIX_FMT_YVYU :
        buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
        buf->size.extS[1] = 0;
        buf->size.extS[2] = 0;
        break;
    // 2 planes
    case V4L2_PIX_FMT_NV12 :
    case V4L2_PIX_FMT_NV12T :
    case V4L2_PIX_FMT_NV21 :
        buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
        buf->size.extS[2] = 0;
        break;
    case V4L2_PIX_FMT_NV12M :
    case V4L2_PIX_FMT_NV12MT_16X16 :
    case V4L2_PIX_FMT_NV21M:
        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
        buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
        buf->size.extS[2] = 0;
        break;
    case V4L2_PIX_FMT_NV16 :
    case V4L2_PIX_FMT_NV61 :
        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
        buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
        buf->size.extS[2] = 0;
        break;
    // 3 planes
    case V4L2_PIX_FMT_YUV420 :
    case V4L2_PIX_FMT_YVU420 :
        buf->size.extS[0] = (w * h);
        buf->size.extS[1] = (w * h) >> 2;
        buf->size.extS[2] = (w * h) >> 2;
        break;
    case V4L2_PIX_FMT_YUV420M:
    case V4L2_PIX_FMT_YVU420M :
    case V4L2_PIX_FMT_YUV422P :
        buf->size.extS[0] = ALIGN(w,  32) * ALIGN(h,  16);
        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
        buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
        break;
    default:
        ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
        return;
    }
}

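// Derives a centered crop rectangle on the source image that matches the
// destination aspect ratio (optionally shrunk further by the zoom factor),
// keeping the crop width, height, and offsets aligned to even values.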
1415bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
1416                                             int  dst_w,  int   dst_h,
1417                                             int *crop_x, int *crop_y,
1418                                             int *crop_w, int *crop_h,
1419                                             int zoom)
1420{
1421    *crop_w = src_w;
1422    *crop_h = src_h;
1423
1424    if (   src_w != dst_w
1425        || src_h != dst_h) {
1426        float src_ratio = 1.0f;
1427        float dst_ratio = 1.0f;
1428
1429        // ex : 1024 / 768
1430        src_ratio = (float)src_w / (float)src_h;
1431
1432        // ex : 352  / 288
1433        dst_ratio = (float)dst_w / (float)dst_h;
1434
1435        if (dst_w * dst_h < src_w * src_h) {
1436            if (dst_ratio <= src_ratio) {
1437                // shrink w
1438                *crop_w = src_h * dst_ratio;
1439                *crop_h = src_h;
1440            } else {
1441                // shrink h
1442                *crop_w = src_w;
1443                *crop_h = src_w / dst_ratio;
1444            }
1445        } else {
1446            if (dst_ratio <= src_ratio) {
1447                // shrink w
1448                *crop_w = src_h * dst_ratio;
1449                *crop_h = src_h;
1450            } else {
1451                // shrink h
1452                *crop_w = src_w;
1453                *crop_h = src_w / dst_ratio;
1454            }
1455        }
1456    }
1457
1458    if (zoom != 0) {
1459        float zoomLevel = ((float)zoom + 10.0) / 10.0;
1460        *crop_w = (int)((float)*crop_w / zoomLevel);
1461        *crop_h = (int)((float)*crop_h / zoomLevel);
1462    }
1463
1464    #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
1465    unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
1466    if (w_align != 0) {
1467        if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
1468            && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
1469            *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
1470        }
1471        else
1472            *crop_w -= w_align;
1473    }
1474
1475    #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
1476    unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
1477    if (h_align != 0) {
1478        if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
1479            && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
1480            *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
1481        }
1482        else
1483            *crop_h -= h_align;
1484    }
1485
1486    *crop_x = (src_w - *crop_w) >> 1;
1487    *crop_y = (src_h - *crop_h) >> 1;
1488
1489    if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
1490        *crop_x -= 1;
1491
1492    if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
1493        *crop_y -= 1;
1494
1495    return true;
1496}
1497
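/*
 * BayerBufManager tracks NUM_BAYER_BUFFERS Bayer buffers through a fixed life cycle, with a
 * separate ring head for each transition:
 *   BAYER_ON_HAL_EMPTY  --MarkSensorEnqueue--> BAYER_ON_SENSOR
 *   BAYER_ON_SENSOR     --MarkSensorDequeue--> BAYER_ON_HAL_FILLED (tagged with reqFrameCnt/timestamp)
 *   BAYER_ON_HAL_FILLED --MarkIspEnqueue-----> BAYER_ON_ISP
 *   BAYER_ON_ISP        --MarkIspDequeue-----> BAYER_ON_HAL_EMPTY
 * The numOn* counters are the per-state buffer counts logged by the Mark* methods.
 */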
1498BayerBufManager::BayerBufManager()
1499{
1500    ALOGV("DEBUG(%s): ", __FUNCTION__);
1501    for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
1502        entries[i].status = BAYER_ON_HAL_EMPTY;
1503        entries[i].reqFrameCnt = 0;
1504    }
1505    sensorEnqueueHead = 0;
1506    sensorDequeueHead = 0;
1507    ispEnqueueHead = 0;
1508    ispDequeueHead = 0;
1509    numOnSensor = 0;
1510    numOnIsp = 0;
1511    numOnHalFilled = 0;
1512    numOnHalEmpty = NUM_BAYER_BUFFERS;
1513}
1514
1515int     BayerBufManager::GetIndexForSensorEnqueue()
1516{
1517    int ret = 0;
1518    if (numOnHalEmpty == 0)
1519        ret = -1;
1520    else
1521        ret = sensorEnqueueHead;
1522    ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
1523    return ret;
1524}
1525
1526int    BayerBufManager::MarkSensorEnqueue(int index)
1527{
1528    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
1529
1530    // sanity check
1531    if (index != sensorEnqueueHead) {
1532        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
1533        return -1;
1534    }
1535    if (entries[index].status != BAYER_ON_HAL_EMPTY) {
1536        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
1537            index, entries[index].status, BAYER_ON_HAL_EMPTY);
1538        return -1;
1539    }
1540
1541    entries[index].status = BAYER_ON_SENSOR;
1542    entries[index].reqFrameCnt = 0;
1543    numOnHalEmpty--;
1544    numOnSensor++;
1545    sensorEnqueueHead = GetNextIndex(index);
1546    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
1547        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
1548    return 0;
1549}
1550
1551int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
1552{
1553    ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
1554
1555    // sanity check
1556    if (index != sensorDequeueHead) {
1557        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorDequeueHead);
1558        return -1;
1559    }
1560    if (entries[index].status != BAYER_ON_SENSOR) {
1561        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
1562            index, entries[index].status, BAYER_ON_SENSOR);
1563        return -1;
1564    }
1565
1566    entries[index].status = BAYER_ON_HAL_FILLED;
1567    entries[index].reqFrameCnt = reqFrameCnt;
1568    entries[index].timeStamp = *timeStamp;
1569    numOnHalFilled++;
1570    numOnSensor--;
1571    sensorDequeueHead = GetNextIndex(index);
1572    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
1573        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
1574    return 0;
1575}
1576
1577int     BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
1578{
1579    int ret = 0;
1580    if (numOnHalFilled == 0)
1581        ret = -1;
1582    else {
1583        *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
1584        ret = ispEnqueueHead;
1585    }
1586    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
1587    return ret;
1588}
1589
1590int     BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
1591{
1592    int ret = 0;
1593    if (numOnIsp == 0)
1594        ret = -1;
1595    else {
1596        *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
1597        ret = ispDequeueHead;
1598    }
1599    ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
1600    return ret;
1601}
1602
1603int    BayerBufManager::MarkIspEnqueue(int index)
1604{
1605    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);
1606
1607    // sanity check
1608    if (index != ispEnqueueHead) {
1609        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
1610        return -1;
1611    }
1612    if (entries[index].status != BAYER_ON_HAL_FILLED) {
1613        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
1614            index, entries[index].status, BAYER_ON_HAL_FILLED);
1615        return -1;
1616    }
1617
1618    entries[index].status = BAYER_ON_ISP;
1619    numOnHalFilled--;
1620    numOnIsp++;
1621    ispEnqueueHead = GetNextIndex(index);
1622    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
1623        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
1624    return 0;
1625}
1626
1627int    BayerBufManager::MarkIspDequeue(int index)
1628{
1629    ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);
1630
1631    // sanity check
1632    if (index != ispDequeueHead) {
1633        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
1634        return -1;
1635    }
1636    if (entries[index].status != BAYER_ON_ISP) {
1637        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
1638            index, entries[index].status, BAYER_ON_ISP);
1639        return -1;
1640    }
1641
1642    entries[index].status = BAYER_ON_HAL_EMPTY;
1643    entries[index].reqFrameCnt = 0;
1644    numOnHalEmpty++;
1645    numOnIsp--;
1646    ispDequeueHead = GetNextIndex(index);
1647    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
1648        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
1649    return 0;
1650}
1651
1652int BayerBufManager::GetNumOnSensor()
1653{
1654    return numOnSensor;
1655}
1656
1657int BayerBufManager::GetNumOnHalFilled()
1658{
1659    return numOnHalFilled;
1660}
1661
1662int BayerBufManager::GetNumOnIsp()
1663{
1664    return numOnIsp;
1665}
1666
1667int     BayerBufManager::GetNextIndex(int index)
1668{
1669    index++;
1670    if (index >= NUM_BAYER_BUFFERS)
1671        index = 0;
1672
1673    return index;
1674}
1675
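/*
 * Main thread signal handling: SIGNAL_MAIN_REQ_Q_NOT_EMPTY pulls a request from the service
 * request queue while the request manager has room, registers it, re-arms itself to keep
 * draining the queue, and wakes the sensor thread. SIGNAL_MAIN_STREAM_OUTPUT_DONE assembles
 * the completed frame's metadata, frees the finished request back to the service, appends the
 * metadata to a frame dequeued from the frame queue, enqueues the result, and restarts request
 * processing if work remains.
 */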
1676void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
1677{
1678    camera_metadata_t *currentRequest = NULL;
1679    camera_metadata_t *currentFrame = NULL;
1680    size_t numEntries = 0;
1681    size_t frameSize = 0;
1682    camera_metadata_t * preparedFrame = NULL;
1683    camera_metadata_t *deregisteredRequest = NULL;
1684    uint32_t currentSignal = self->GetProcessingSignal();
1685    MainThread *  selfThread      = ((MainThread*)self);
1686    int res = 0;
1687
1688    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
1689
1690    if (currentSignal & SIGNAL_THREAD_RELEASE) {
1691        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
1692
1693        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
1694        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
1695        return;
1696    }
1697
1698    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
1699        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
1700        if (m_requestManager->IsRequestQueueFull()==false
1701                && m_requestManager->GetNumEntries()<NUM_MAX_DEQUEUED_REQUEST) {
1702            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
1703            if (NULL == currentRequest) {
1704                ALOGV("DEBUG(%s): dequeue_request returned NULL ", __FUNCTION__);
1705                m_isRequestQueueNull = true;
1706            }
1707            else {
1708                m_requestManager->RegisterRequest(currentRequest);
1709
1710                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
1711                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
1712                if (m_requestManager->IsRequestQueueFull()==false
1713                    && m_requestManager->GetNumEntries()<NUM_MAX_DEQUEUED_REQUEST)
1714                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
1715
1716                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
1717            }
1718        }
1719        else {
1720            m_isRequestQueuePending = true;
1721        }
1722    }
1723
1724    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
1725        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
1726        /*while (1)*/ {
1727            m_lastTimeStamp = 0;
1728            m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame);
1729            m_requestManager->DeregisterRequest(&deregisteredRequest);
1730            m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
1731            m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
1732            if (currentFrame==NULL) {
1733                ALOGD("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
1734            }
1735            else {
1736                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries,frameSize);
1737            }
1738            res = append_camera_metadata(currentFrame, preparedFrame);
1739            if (res==0) {
1740                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
1741                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
1742            }
1743            else {
1744                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
1745            }
1746        }
1747        if (!m_isRequestQueueNull) {
1748            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1749        }
1750
1751        if (getInProgressCount()>0) {
1752            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
1753            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
1754        }
1755    }
1756    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
1757    return;
1758}
1759
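/*
 * Sensor thread initialization: select the sensor id from the camera id, prepare the "dummy
 * shot" used to pad the pipeline when no request is pending, open the sensor video node
 * (NODE_PREFIX + 40), and allocate NUM_BAYER_BUFFERS dma-buf Bayer buffers (plane 0:
 * width * height * 2 bytes of SBGGR16, plane 1: 8 KB of shot metadata). Initialization then
 * waits for the ISP thread to set m_initFlag2.
 */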
1760void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * self)
1761{
1762    ALOGV("DEBUG(%s): ", __FUNCTION__ );
1763    SensorThread * selfThread = ((SensorThread*)self);
1764    char node_name[30];
1765    int fd = 0;
1766    int i =0, j=0;
1767
1768    if(m_cameraId == 0)
1769        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
1770    else
1771        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
1772
1773    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
1774    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
1775    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
1776
1777    m_camera_info.dummy_shot.dis_bypass = 1;
1778    m_camera_info.dummy_shot.dnr_bypass = 1;
1779
1780    /*sensor setting*/
1781    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
1782    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
1783    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
1784
1785    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
1786    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
1787    //m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = 1920;
1788
1789    /*request setting*/
1790    m_camera_info.dummy_shot.request_sensor = 1;
1791    m_camera_info.dummy_shot.request_scc = 0;
1792    m_camera_info.dummy_shot.request_scp = 0;
1793    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
1794    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[1] = 0;
1795    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[2] = 0;
1796
1797    /*sensor init*/
1798    memset(node_name, 0x00, sizeof(node_name));
1799    snprintf(node_name, sizeof(node_name), "%s%d", NODE_PREFIX, 40);
1800    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1801
1802    if (fd < 0) {
1803        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1804    }
1805    else {
1806        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1807    }
1808    m_camera_info.sensor.fd = fd;
1809
1810    m_camera_info.sensor.width = getSensorOutputSizeX(m_cameraId);
1811    m_camera_info.sensor.height = getSensorOutputSizeY(m_cameraId);
1812
1813    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
1814    m_camera_info.sensor.planes = 2;
1815    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1816    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1817    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
1818    m_camera_info.sensor.ionClient = m_ionCameraClient;
1819
1820    for(i = 0; i < m_camera_info.sensor.buffers; i++){
1821        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1822        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
1823        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK: driver uses 8*1024; this should come from a predefined constant
1824        allocCameraMemory(m_camera_info.sensor.ionClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1825    }
1826
1827    m_initFlag1 = true;
1828
1829
1830    while (!m_initFlag2) // temp
1831        usleep(100000);
1832    ALOGV("DEBUG(%s): END of SensorThreadInitialize ", __FUNCTION__);
1833    return;
1834}
1835
1836
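// DumpInfoWithShot() logs the request controls and dynamic metadata carried in a
// camera2_shot_ext; the output is only visible when verbose logging is enabled (LOG_NDEBUG 0).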
1837void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
1838{
1839    ALOGV("####  common Section");
1840    ALOGV("####                 magic(%x) ",
1841        shot_ext->shot.magicNumber);
1842    ALOGV("####  ctl Section");
1843    ALOGV("####     metamode(%d) exposureTime(%lld) duration(%lld) ISO(%d) ",
1844        shot_ext->shot.ctl.request.metadataMode,
1845        shot_ext->shot.ctl.sensor.exposureTime,
1846        shot_ext->shot.ctl.sensor.frameDuration,
1847        shot_ext->shot.ctl.sensor.sensitivity);
1848
1849    ALOGV("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) pv(%d) rec(%d)",
1850        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
1851        shot_ext->shot.ctl.request.outputStreams[0],
1852        shot_ext->shot.ctl.request.outputStreams[2]);
1853
1854    ALOGV("####  DM Section");
1855    ALOGV("####     metamode(%d) exposureTime(%lld) duration(%lld) ISO(%d)  timestamp(%lld)",
1856        shot_ext->shot.dm.request.metadataMode,
1857        shot_ext->shot.dm.sensor.exposureTime,
1858        shot_ext->shot.dm.sensor.frameDuration,
1859        shot_ext->shot.dm.sensor.sensitivity,
1860//        shot_ext->shot.dm.sensor.frameCount,
1861        shot_ext->shot.dm.sensor.timeStamp);
1862}
1863
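/*
 * Sensor thread main loop: on SIGNAL_SENSOR_START_REQ_PROCESSING it performs a one-time
 * stream-on of the preview node and priming of the ISP, dequeues a filled Bayer buffer,
 * matches the shot embedded in plane 1 to a pending request via FindFrameCnt(), records the
 * timestamp, and signals the ISP thread. It then refills the sensor queue up to
 * NUM_SENSOR_QBUF buffers, copying the dummy shot into any buffer that has no matching
 * request (a "bubble"). SIGNAL_THREAD_RELEASE streams off the sensor and ISP nodes and
 * disables the IS stream.
 */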
1864void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
1865{
1866    uint32_t        currentSignal = self->GetProcessingSignal();
1867    SensorThread *  selfThread      = ((SensorThread*)self);
1868    int index;
1869    status_t res;
1870    nsecs_t frameTime;
1871    int bayersOnSensor = 0, bayersOnIsp = 0;
1872    ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
1873
1874    if (currentSignal & SIGNAL_THREAD_RELEASE) {
1875        ALOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
1876
1877#if 0 // TODO
1878        for (int i = 0 ; i < NUM_BAYER_BUFFERS ;  i++) {
1879            ALOGV("DEBUG(%s):###  BayerIndex[%d] Status (%d)", __FUNCTION__, i, m_bayerBufStatus[i]);
1880            if (m_bayerBufStatus[i]==BAYER_ON_SENSOR) {
1881                bayersOnSensor++;
1882            }
1883            else if (m_bayerBufStatus[i]==BAYER_ON_ISP) {
1884                bayersOnIsp++;
1885            }
1886        }
1887        for (int i = 0 ; i < bayersOnSensor ; i++) {
1888            index = cam_int_dqbuf(&(m_camera_info.sensor));
1889            ALOGV("DEBUG(%s):###  sensor dqbuf done index(%d)", __FUNCTION__, index);
1890            m_bayerBufStatus[index] = BAYER_ON_HAL_EMPTY;
1891        }
1892        for (int i = 0 ; i < bayersOnIsp ; i++) {
1893            index = cam_int_dqbuf(&(m_camera_info.isp));
1894            ALOGV("DEBUG(%s):###  isp dqbuf done index(%d)", __FUNCTION__, index);
1895            m_bayerBufStatus[index] = BAYER_ON_HAL_EMPTY;
1896        }
1897
1898        for (int i = 0 ; i < NUM_BAYER_BUFFERS ;  i++) {
1899            ALOGV("DEBUG(%s):###  Bayer Buf[%d] Status (%d)", __FUNCTION__, i, m_bayerBufStatus[i]);
1900        }
1901#endif
1902        ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
1903        cam_int_streamoff(&(m_camera_info.sensor));
1904        ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
1905
1906        ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
1907        isp_int_streamoff(&(m_camera_info.isp));
1908        ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
1909
1910        exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
1911
1912        ALOGD("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
1913        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
1914        return;
1915    }
1916
1917    if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
1918    {
1919        ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
1920        int targetStreamIndex = 0, i=0;
1921        int matchedFrameCnt, processingReqIndex;
1922        struct camera2_shot_ext *shot_ext;
1923        if (!m_isSensorStarted)
1924        {
1925            m_isSensorStarted = true;
1926            ALOGE("==DEBUG(%s): calling preview streamon", __FUNCTION__);
1927            cam_int_streamon(&(m_streamThreads[0]->m_parameters.node));
1928            ALOGE("==DEBUG(%s): calling preview streamon done", __FUNCTION__);
1929            for (i = 0; i < m_camera_info.isp.buffers; i++) {
1930                ALOGV("DEBUG(%s): isp initial QBUF [%d]",  __FUNCTION__, i);
1931                cam_int_qbuf(&(m_camera_info.isp), i);
1932            }
1933
1934            cam_int_streamon(&(m_camera_info.isp));
1935
1936            for (i = 0; i < m_camera_info.isp.buffers; i++) {
1937                ALOGV("DEBUG(%s): isp initial DQBUF [%d]",  __FUNCTION__, i);
1938                cam_int_dqbuf(&(m_camera_info.isp));
1939            }
1940
1941            ALOGV("DEBUG(%s): calling isp sctrl done", __FUNCTION__);
1942            exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
1943            ALOGV("DEBUG(%s): calling sensor sctrl done", __FUNCTION__);
1944
1945        }
1946
1947        ALOGD("### Sensor DQBUF start");
1948        index = cam_int_dqbuf(&(m_camera_info.sensor));
1949        frameTime = systemTime();
1950        ALOGD("### Sensor DQBUF done BayerIndex(%d)", index);
1951        bool wait = false;
1952        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
1953        matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
1954        ALOGD("### Matched(%d) last(%d)", matchedFrameCnt, lastFrameCnt);
1955        if (matchedFrameCnt != -1) {
1956            if (matchedFrameCnt == lastFrameCnt)
1957                matchedFrameCnt++;
1958            lastFrameCnt = matchedFrameCnt;
1959            m_scp_closing = false;
1960            m_scp_closed = false;
1961        }
1962
1963        m_BayerManager->MarkSensorDequeue(index, matchedFrameCnt, &frameTime);
1964
1965        m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
1966        ALOGD("### Sensor DQed BayerIndex[%d] passing to ISP. frameCnt(%d) timestamp(%lld)",
1967            index, matchedFrameCnt, frameTime);
1968
1969        if (!(m_ispThread.get()))
1970            return;
1971
1972        m_ispThread->SetSignal(SIGNAL_ISP_START_BAYER_INPUT);
1973
1974        while (m_BayerManager->GetNumOnSensor() <= NUM_SENSOR_QBUF) {
1975
1976            index = m_BayerManager->GetIndexForSensorEnqueue();
1977            if (index == -1) {
1978                ALOGE("ERR(%s) No free Bayer buffer", __FUNCTION__);
1979                break;
1980            }
1981            processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
1982
1983            shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
1984            if (processingReqIndex == -1) {
1985                ALOGV("DEBUG(%s) req underrun => inserting bubble to BayerIndex(%d)", __FUNCTION__, index);
1986                memcpy(shot_ext, &(m_camera_info.dummy_shot), sizeof(struct camera2_shot_ext));
1987            }
1988
1989            m_BayerManager->MarkSensorEnqueue(index);
1990            if (m_scp_closing || m_scp_closed) {
1991                ALOGV("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
1992                shot_ext->request_scc = 0;
1993                shot_ext->request_scp = 0;
1994                shot_ext->request_sensor = 0;
1995            }
1996            ALOGD("### Sensor QBUF start BayerIndex[%d]", index);
1997            cam_int_qbuf(&(m_camera_info.sensor), index);
1998            ALOGD("### Sensor QBUF done");
1999        }
2000        if (!m_closing){
2001            ALOGV("DEBUG(%s): not closing, re-arming SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
2002            selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2003        }
2004        if (wait) {
2005            ALOGE("###waiting###");
2006            usleep(20000);
2007        }
2008        return;
2009    }
2010    return;
2011}
2012
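/*
 * ISP thread initialization: wait for the sensor side (m_initFlag1), open the ISP output node
 * (NODE_PREFIX + 41) and point its buffers at the sensor's dma-buf fds and mappings, configure
 * and stream on the sensor with dummy shots queued, then open the SCC capture node
 * (NODE_PREFIX + 42), allocate 8 YUYV buffers for it, queue them and stream on. Setting
 * m_initFlag2 releases the sensor thread.
 */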
2013void ExynosCameraHWInterface2::m_ispThreadInitialize(SignalDrivenThread * self)
2014{
2015    ALOGV("DEBUG(%s): ", __FUNCTION__ );
2016    IspThread * selfThread = ((IspThread*)self);
2017    char node_name[30];
2018    int fd = 0;
2019    int i =0, j=0;
2020
2021
2022    while (!m_initFlag1) //temp
2023        usleep(100000);
2024
2025    /*isp init*/
2026    memset(node_name, 0x00, sizeof(node_name));
2027    snprintf(node_name, sizeof(node_name), "%s%d", NODE_PREFIX, 41);
2028    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
2029
2030    if (fd < 0) {
2031        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
2032    }
2033    else {
2034        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
2035    }
2036    m_camera_info.isp.fd = fd;
2037
2038    m_camera_info.isp.width = m_camera_info.sensor.width;
2039    m_camera_info.isp.height = m_camera_info.sensor.height;
2040    m_camera_info.isp.format = m_camera_info.sensor.format;
2041    m_camera_info.isp.planes = m_camera_info.sensor.planes;
2042    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
2043    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2044    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
2045
2046    for(i = 0; i < m_camera_info.isp.buffers; i++){
2047        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
2048        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
2049        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
2050        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
2051        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
2052        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
2053        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
2054    }
2055
2056    cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
2057    cam_int_s_fmt(&(m_camera_info.isp));
2058    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
2059    cam_int_reqbufs(&(m_camera_info.isp));
2060    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
2061    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
2062
2063    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
2064    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
2065    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
2066        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
2067    }
2068    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
2069    cam_int_reqbufs(&(m_camera_info.sensor));
2070    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
2071    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
2072        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
2073        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
2074                sizeof(struct camera2_shot_ext));
2075        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
2076
2077        cam_int_qbuf(&(m_camera_info.sensor), i);
2078        m_BayerManager->MarkSensorEnqueue(i);
2079    }
2080    ALOGE("== stream_on :: m_camera_info.sensor");
2081    cam_int_streamon(&(m_camera_info.sensor));
2082
2083
2084
2085/*capture init*/
2086    memset(node_name, 0x00, sizeof(node_name));
2087    snprintf(node_name, sizeof(node_name), "%s%d", NODE_PREFIX, 42);
2088    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
2089
2090    if (fd < 0) {
2091        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
2092    }
2093    else {
2094        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
2095    }
2096    m_camera_info.capture.fd = fd;
2097
2098    m_camera_info.capture.width = getSccOutputSizeX(m_cameraId);
2099    m_camera_info.capture.height = getSccOutputSizeY(m_cameraId);
2100    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
2101    m_camera_info.capture.planes = 1;
2102    m_camera_info.capture.buffers = 8;
2103    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
2104    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
2105    m_camera_info.capture.ionClient = m_ionCameraClient;
2106
2107    for(i = 0; i < m_camera_info.capture.buffers; i++){
2108        initCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
2109        m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
2110        allocCameraMemory(m_camera_info.capture.ionClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
2111    }
2112
2113    cam_int_s_input(&(m_camera_info.capture), m_camera_info.sensor_id);
2114    cam_int_s_fmt(&(m_camera_info.capture));
2115    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
2116    cam_int_reqbufs(&(m_camera_info.capture));
2117    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
2118
2119    for (i = 0; i < m_camera_info.capture.buffers; i++) {
2120        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
2121        cam_int_qbuf(&(m_camera_info.capture), i);
2122    }
2123
2124    ALOGE("== stream_on :: m_camera_info.capture");
2125    cam_int_streamon(&(m_camera_info.capture));
2126
2127    m_initFlag2 = true;
2128    ALOGV("DEBUG(%s): END of IspThreadInitialize ", __FUNCTION__);
2129    return;
2130}
2131
2132
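/*
 * ISP thread main loop: SIGNAL_ISP_START_BAYER_INPUT takes the next HAL-filled Bayer buffer,
 * writes the per-request output stream selection into its shot (or the dummy shot for
 * bubbles), and queues it to the ISP, re-arming itself while more filled buffers are waiting
 * since input has priority. SIGNAL_ISP_START_BAYER_DEQUEUE wakes the capture stream thread
 * when request_scc is set and the preview/record stream thread when outputStreams[0]/[2] are
 * set, dequeues the processed buffer, detects the all-zero request combination used while the
 * SCP is closing, applies the dynamic metadata, and returns the buffer to the HAL-empty pool.
 */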
2133void ExynosCameraHWInterface2::m_ispThreadFunc(SignalDrivenThread * self)
2134{
2135    uint32_t        currentSignal = self->GetProcessingSignal();
2136    IspThread *  selfThread      = ((IspThread*)self);
2137    int index;
2138    status_t res;
2139    ALOGV("DEBUG(%s): m_ispThreadFunc (%x)", __FUNCTION__, currentSignal);
2140
2141    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2142        ALOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2143
2144        ALOGV("(%s): calling capture streamoff", __FUNCTION__);
2145        cam_int_streamoff(&(m_camera_info.capture));
2146        ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
2147
2148        ALOGD("(%s): EXIT  processing SIGNAL_THREAD_RELEASE ", __FUNCTION__);
2149        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2150        return;
2151    }
2152
2153    if (currentSignal & SIGNAL_ISP_START_BAYER_INPUT)
2154    {
2155        struct camera2_shot_ext *shot_ext;
2156        int bayerIndexToEnqueue = 0;
2157        int processingFrameCnt = 0;
2158
2159        ALOGV("DEBUG(%s): IspThread processing SIGNAL_ISP_START_BAYER_INPUT", __FUNCTION__);
2160
2161        bayerIndexToEnqueue = m_BayerManager->GetIndexForIspEnqueue(&processingFrameCnt);
2162        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[bayerIndexToEnqueue].virt.extP[1]);
2163
2164        ALOGV("### isp QBUF start bayerIndex[%d] for frameCnt(%d)", bayerIndexToEnqueue, processingFrameCnt);
2165
2166        if (processingFrameCnt != -1) {
2167            ALOGV("### writing output stream info");
2168            m_requestManager->UpdateOutputStreamInfo(shot_ext, processingFrameCnt);
2169            DumpInfoWithShot(shot_ext);
2170        }
2171        else {
2172            memcpy(shot_ext, &(m_camera_info.dummy_shot), sizeof(struct camera2_shot_ext));
2173        }
2174        if (m_scp_flushing) {
2175            shot_ext->request_scp = 1;
2176        }
2177        if (m_scp_closing || m_scp_closed) {
2178            ALOGV("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
2179            shot_ext->request_scc = 0;
2180            shot_ext->request_scp = 0;
2181            shot_ext->request_sensor = 0;
2182        }
2183        cam_int_qbuf(&(m_camera_info.isp), bayerIndexToEnqueue);
2184        ALOGV("### isp QBUF done bayerIndex[%d] scp(%d)", bayerIndexToEnqueue, shot_ext->request_scp);
2185        m_BayerManager->MarkIspEnqueue(bayerIndexToEnqueue);
2186
2187        if (m_BayerManager->GetNumOnHalFilled() != 0) {
2188            // input has priority
2189            selfThread->SetSignal(SIGNAL_ISP_START_BAYER_INPUT);
2190            return;
2191        }
2192        else {
2193            selfThread->SetSignal(SIGNAL_ISP_START_BAYER_DEQUEUE);
2194        }
2195    }
2196
2197    if (currentSignal & SIGNAL_ISP_START_BAYER_DEQUEUE)
2198    {
2199        struct camera2_shot_ext *shot_ext;
2200        int bayerIndexToDequeue = 0;
2201        int processingFrameCnt = 0;
2202        ALOGV("DEBUG(%s): IspThread processing SIGNAL_ISP_START_BAYER_DEQUEUE", __FUNCTION__);
2203
2204        bayerIndexToDequeue = m_BayerManager->GetIndexForIspDequeue(&processingFrameCnt);
2205        m_ispProcessingFrameCnt = processingFrameCnt;
2206        m_previewOutput = 0;
2207        m_recordOutput = 0;
2208        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[bayerIndexToDequeue].virt.extP[1]);
2209        if (processingFrameCnt != -1 || m_scp_flushing) // bubble
2210        {
2211            if (shot_ext->request_scc) {
2212                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
2213            }
2214            m_previewOutput = shot_ext->shot.ctl.request.outputStreams[0];
2215            m_recordOutput = shot_ext->shot.ctl.request.outputStreams[2];
2216            if (m_previewOutput || m_recordOutput) {
2217                m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
2218            }
2219            m_lastTimeStamp = systemTime();
2220        }
2221        ALOGD("### isp DQBUF start");
2222        index = cam_int_dqbuf(&(m_camera_info.isp));
2223        ALOGD("### isp DQBUF done bayerIndex(%d) for frameCnt(%d)", index, processingFrameCnt);
2224        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
2225        ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
2226           shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
2227        if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
2228            ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
2229            m_scp_closed = true;
2230        }
2231        else
2232            m_scp_closed = false;
2233        if (processingFrameCnt != -1) {
2234            DumpInfoWithShot(shot_ext);
2235            m_requestManager->ApplyDynamicMetadata(shot_ext, processingFrameCnt);
2236        }
2237        m_BayerManager->MarkIspDequeue(index);
2238        if (m_BayerManager->GetNumOnIsp() != 0) {
2239            selfThread->SetSignal(SIGNAL_ISP_START_BAYER_DEQUEUE);
2240        }
2241    }
2242
2243    return;
2244}
2245
2246void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
2247{
2248    StreamThread *          selfThread      = ((StreamThread*)self);
2249    ALOGV("DEBUG(%s): ", __FUNCTION__ );
2250    memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
2251    selfThread->m_isBufferInit = false;
2252
2253    return;
2254}
2255
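/*
 * Stream thread signals: SIGNAL_STREAM_CHANGE_PARAMETER applies new stream parameters (and,
 * for the capture stream, reallocates m_resizeBuf); SIGNAL_THREAD_RELEASE cancels service
 * buffers and streams off; SIGNAL_STREAM_DATA_COMING drains one output buffer, where
 * streamType 0 is the preview/record path fed by the SCP and streamType 1 is the
 * still-capture path fed by the SCC, ending in JPEG encoding.
 */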
2256void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
2257{
2258    uint32_t                currentSignal   = self->GetProcessingSignal();
2259    StreamThread *          selfThread      = ((StreamThread*)self);
2260    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
2261    record_parameters_t     *selfRecordParms =  &(selfThread->m_recordParameters);
2262    node_info_t             *currentNode    = &(selfStreamParms->node);
2263
2264    ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
2265
2266    if (currentSignal & SIGNAL_STREAM_CHANGE_PARAMETER) {
2267        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER", __FUNCTION__);
2268        selfThread->applyChange();
2269        if (selfStreamParms->streamType==1) {
2270            m_resizeBuf.size.extS[0] = ALIGN(selfStreamParms->outputWidth, 16) * ALIGN(selfStreamParms->outputHeight, 16) * 2;
2271            m_resizeBuf.size.extS[1] = 0;
2272            m_resizeBuf.size.extS[2] = 0;
2273
2274            if (allocCameraMemory(selfStreamParms->ionClient, &m_resizeBuf, 1) == -1) {
2275                ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
2276            }
2277        }
2278        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER DONE", __FUNCTION__);
2279    }
2280
2281    if (currentSignal & SIGNAL_THREAD_RELEASE) {
2282        int i, index = -1, cnt_to_dq = 0;
2283        status_t res;
2284        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2285
2286
2287
2288        if (selfThread->m_isBufferInit) {
2289            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
2290                ALOGV("DEBUG(%s): checking buffer index[%d] - status(%d)",
2291                    __FUNCTION__, i, selfStreamParms->svcBufStatus[i]);
2292                if (selfStreamParms->svcBufStatus[i] ==ON_DRIVER) cnt_to_dq++;
2293            }
2294            m_scp_flushing = true;
2295            ALOGV("DEBUG(%s): cnt to dq (%d)", __FUNCTION__, cnt_to_dq);
2296            /* TO CHECK
2297            for ( i=0 ; i < cnt_to_dq ; i++) {
2298                ALOGV("@@@@@@ dq start");
2299                index = cam_int_dqbuf(&(selfStreamParms->node));
2300                ALOGV("@@@@@@ dq done, index(%d)", index);
2301                if (index >=0 && index < selfStreamParms->numSvcBuffers) {
2302                    selfStreamParms->svcBufStatus[index] = ON_HAL;
2303                }
2304            }
2305            */
2306            m_scp_flushing = false;
2307            m_scp_closing = true;
2308            ALOGV("DEBUG(%s): calling stream(%d) streamoff (fd:%d)", __FUNCTION__,
2309            selfThread->m_index, selfStreamParms->fd);
2310            cam_int_streamoff(&(selfStreamParms->node));
2311            ALOGV("DEBUG(%s): calling stream(%d) streamoff done", __FUNCTION__, selfThread->m_index);
2312
2313            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
2314                ALOGV("DEBUG(%s): releasing buffer index[%d] - status(%d)",
2315                    __FUNCTION__, i, selfStreamParms->svcBufStatus[i]);
2316
2317                switch (selfStreamParms->svcBufStatus[i]) {
2318
2319                case ON_DRIVER:
2320                    //ALOGV("@@@@@@ this should not happen");
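                    // fall through: ON_DRIVER buffers are cancelled back to the service the
                    // same way as ON_HAL buffers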
2321                case ON_HAL:
2322                    res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
2323                            &(selfStreamParms->svcBufHandle[i]));
2324                    if (res != NO_ERROR ) {
2325                        ALOGE("ERR(%s): unable to cancel buffer : %d",__FUNCTION__ , res);
2326                         // TODO : verify after service is ready
2327                         // return;
2328                    }
2329                    break;
2330                case ON_SERVICE:
2331                default:
2332                    break;
2333
2334                }
2335            }
2336        }
2337        if (m_resizeBuf.size.s != 0) {
2338            freeCameraMemory(&m_resizeBuf, 1);
2339        }
2340        if (m_resizeBuf2.size.s != 0) {
2341            freeCameraMemory(&m_resizeBuf2, 1);
2342        }
2343        selfThread->m_isBufferInit = false;
2344        selfThread->m_index = 255;
2345
2346        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2347
2348        //selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2349        return;
2350    }
2351
2352    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
2353        buffer_handle_t * buf = NULL;
2354        status_t res;
2355        void *virtAddr[3];
2356        int i, j;
2357        int index;
2358        ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING",
2359            __FUNCTION__,selfThread->m_index);
2360        if (!(selfThread->m_isBufferInit)) {
2361            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
2362                res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
2363                if (res != NO_ERROR || buf == NULL) {
2364                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
2365                    return;
2366                }
2367                ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
2368                   ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2369
2370                if (m_grallocHal->lock(m_grallocHal, *buf,
2371                           selfStreamParms->usage,
2372                           0, 0, selfStreamParms->outputWidth, selfStreamParms->outputHeight, virtAddr) != 0) {
2373                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2374                    return;
2375                }
2376                ALOGV("DEBUG(%s): locked img buf plane0(%x) plane1(%x) plane2(%x)",
2377                __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
2378
2379                index = selfThread->findBufferIndex(virtAddr[0]);
2380                if (index == -1) {
2381                    ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
2382                }
2383                else {
2384                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
2385                        __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
2386                    if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
2387                        selfStreamParms->svcBufStatus[index] = ON_DRIVER;
2388                    else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
2389                        selfStreamParms->svcBufStatus[index] = ON_HAL;
2390                    else {
2391                        ALOGV("DBG(%s): buffer status abnormal (%d) "
2392                            , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
2393                    }
2394                    if (*buf != selfStreamParms->svcBufHandle[index])
2395                        ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
2396                    else
2397                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
2398                }
2399                m_svcBufIndex = 0;
2400            }
2401            selfThread->m_isBufferInit = true;
2402        }
2403
2404        if (m_recordingEnabled && m_needsRecordBufferInit) {
2405            ALOGV("DEBUG(%s): Recording Buffer Initialization numsvcbuf(%d)",
2406                __FUNCTION__, selfRecordParms->numSvcBuffers);
2407
2408            m_resizeBuf2.size.extS[0] = ALIGN(selfRecordParms->outputWidth, 32) * ALIGN(selfRecordParms->outputHeight, 32) * 4;
2409            m_resizeBuf2.size.extS[1] =  0;
2410            m_resizeBuf2.size.extS[2] =  0;
2411            ALOGV("DEBUG(%s): resizebuf2 size0(%d) size1(%d)", __FUNCTION__, m_resizeBuf2.size.extS[0], m_resizeBuf2.size.extS[1]);
2412            if (allocCameraMemory(selfStreamParms->ionClient, &m_resizeBuf2, 1) == -1) {
2413                ALOGE("ERR(%s): Failed to allocate resize buf2", __FUNCTION__);
2414            }
2415
2416            int checkingIndex = 0;
2417            bool found = false;
2418            for ( i=0 ; i < selfRecordParms->numSvcBuffers; i++) {
2419                res = selfRecordParms->streamOps->dequeue_buffer(selfRecordParms->streamOps, &buf);
2420                if (res != NO_ERROR || buf == NULL) {
2421                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
2422                    return;
2423                }
2424                selfRecordParms->numBufsInHal++;
2425                ALOGV("DEBUG(%s): [record] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
2426                   selfRecordParms->numBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2427
2428                if (m_grallocHal->lock(m_grallocHal, *buf,
2429                       selfRecordParms->usage, 0, 0,
2430                       selfRecordParms->outputWidth, selfRecordParms->outputHeight, virtAddr) != 0) {
2431                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2432                }
2433                else {
2434                      ALOGV("DEBUG(%s): [record] locked img buf plane0(%x) plane1(%x) plane2(%x)",
2435                        __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
2436
2437                }
2438                found = false;
2439                for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
2440                    //ALOGV("DEBUG(%s) : comparing %d %x  %x", __FUNCTION__, checkingIndex,
2441                    //selfRecordParms->svcBufHandle[checkingIndex], *buf);
2442                    if (selfRecordParms->svcBufHandle[checkingIndex] == *buf ) {
2443                        found = true;
2444                        break;
2445                    }
2446                }
2447                ALOGV("DEBUG(%s): [record] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
2448                if (!found) break;
2449                index = checkingIndex;
2450
2451
2452                if (index == -1) {
2453                    ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
2454                }
2455                else {
2456                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
2457                        __FUNCTION__, index, selfRecordParms->svcBufStatus[index]);
2458                    if (selfRecordParms->svcBufStatus[index]== ON_SERVICE)
2459                        selfRecordParms->svcBufStatus[index] = ON_HAL;
2460                    else {
2461                        ALOGV("DBG(%s): buffer status abnormal (%d) "
2462                            , __FUNCTION__, selfRecordParms->svcBufStatus[index]);
2463                    }
2464                    if (*buf != selfRecordParms->svcBufHandle[index])
2465                        ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
2466                    else
2467                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
2468                }
2469                selfRecordParms->m_svcBufIndex = 0;
2470            }
2471            m_needsRecordBufferInit = false;
2472        }
2473
2474
2475        if (selfStreamParms->streamType==0) {
2476            ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
2477                selfThread->m_index, selfStreamParms->streamType);
2478
2479            index = cam_int_dqbuf(&(selfStreamParms->node));
2480            ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
2481                selfThread->m_index, selfStreamParms->streamType, index);
2482
2483
2484            if (selfStreamParms->svcBufStatus[index] !=  ON_DRIVER)
2485                ALOGD("DBG(%s): DQed buffer status abnormal (%d) ",
2486                       __FUNCTION__, selfStreamParms->svcBufStatus[index]);
2487            selfStreamParms->svcBufStatus[index] = ON_HAL;
2488
2489
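            // Recording path: the SCP buffer just dequeued is color-converted below
            // (YV12 -> RGBA8888) into m_resizeBuf2 with m_exynosVideoCSC, copied into the next
            // free record service buffer, and enqueued to the service with a current timestamp.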
2490            if (m_recordOutput && m_recordingEnabled) {
2491                ALOGV("DEBUG(%s): Entering record frame creator, index(%d)",__FUNCTION__, selfRecordParms->m_svcBufIndex);
2492                while (selfRecordParms->svcBufStatus[selfRecordParms->m_svcBufIndex] != ON_HAL) {
2493                    ALOGV("DEBUG(%s): finding free recording buffer(%d)", __FUNCTION__,  selfRecordParms->m_svcBufIndex);
2494                    selfRecordParms->m_svcBufIndex++;
                    // wrap around instead of walking past the end of svcBufStatus[];
                    // like the original loop, this still assumes at least one record buffer is ON_HAL
                    if (selfRecordParms->m_svcBufIndex >= selfRecordParms->numSvcBuffers)
                        selfRecordParms->m_svcBufIndex = 0;
2495                }
2496
2497                if (m_exynosVideoCSC) {
2498                    int videoW = selfRecordParms->outputWidth, videoH = selfRecordParms->outputHeight;
2499                    int cropX, cropY, cropW, cropH = 0;
2500                    int previewW = selfStreamParms->outputWidth, previewH = selfStreamParms->outputHeight;
2501
2502                    m_getRatioSize(previewW, previewH,
2503                                    videoW, videoH,
2504                                   &cropX, &cropY,
2505                                   &cropW, &cropH,
2506                                   0);
2507
2508                    ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
2509                             __FUNCTION__, cropX, cropY, cropW, cropH);
2510
2511
2512                    csc_set_src_format(m_exynosVideoCSC,
2513                                       //ALIGN(previewW, 32), ALIGN(previewH, 32),
2514                                       previewW, previewH,
2515                                       cropX, cropY, cropW, cropH,
2516                                       HAL_PIXEL_FORMAT_YV12,
2517                                       0);
2518
2519                    csc_set_dst_format(m_exynosVideoCSC,
2520                                       ALIGN(videoW, 32), ALIGN(videoH, 32),
2521                                       0, 0, videoW, videoH,
2522                                       HAL_PIXEL_FORMAT_RGBA_8888,
2523                                       1);
2524
2525                    ALOGV("DEBUG(%s) [1]-- bufindex(%d)", __FUNCTION__, selfRecordParms->m_svcBufIndex);
2526               /*     if  (m_savecnt == 10)
2527                    {
2528                        m_savePostView("/data/src00_00",
2529                            (uint8_t*)selfStreamParms->svcBuffers[index].virt.extP[0], selfStreamParms->svcBuffers[index].size.extS[0]);
2530
2531                        m_savePostView( "/data/src00_01",
2532                            (uint8_t*)selfStreamParms->svcBuffers[index].virt.extP[1], selfStreamParms->svcBuffers[index].size.extS[1]);
2533
2534                        m_savePostView( "/data/src00_02",
2535                            (uint8_t*)selfStreamParms->svcBuffers[index].virt.extP[2], selfStreamParms->svcBuffers[index].size.extS[2]);
2536                    } */
2537                    /*int tempFd;
2538                    tempFd = selfStreamParms->svcBuffers[index].fd.extFd[2];
2539                    selfStreamParms->svcBuffers[index].fd.extFd[2] = selfStreamParms->svcBuffers[index].fd.extFd[1];
2540                    selfStreamParms->svcBuffers[index].fd.extFd[1] = tempFd;*/
2541                    csc_set_src_buffer(m_exynosVideoCSC,
2542                                   (void **)(&(selfStreamParms->svcBuffers[index].fd.fd)));
2543
2544
2545                    //m_resizeBuf2.fd.extFd[2] = 0;
2546                    for (int i=0 ; i <selfRecordParms->svcPlanes; i++)
2547                        ALOGV("DEBUG(%s): m_resizeBuf2.fd.extFd[%d]=%d addr(%x) m_resizeBuf2.size.extS[%d]=%d",
2548                            __FUNCTION__, i, m_resizeBuf2.fd.extFd[i],  (unsigned int)m_resizeBuf2.virt.extP[i], i,
2549                            m_resizeBuf2.size.extS[i]);
2550                    csc_set_dst_buffer(m_exynosVideoCSC,
2551                                       (void **)(&(m_resizeBuf2.fd.fd)));
2552
2553
2554                    if (csc_convert(m_exynosVideoCSC) != 0) {
2555                        ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
2556                    }
2557                    else {
2558                        ALOGV("DEBUG(%s):csc_convert() success", __FUNCTION__);
2559                    }
2560                    /*tempFd = selfStreamParms->svcBuffers[index].fd.extFd[2];
2561                    selfStreamParms->svcBuffers[index].fd.extFd[2] = selfStreamParms->svcBuffers[index].fd.extFd[1];
2562                    selfStreamParms->svcBuffers[index].fd.extFd[1] = tempFd;                    */
2563                  /*  if (m_savecnt == 6)
2564                    {
2565                         m_savePostView( "/data/res00.rgb",
2566                            (uint8_t*)m_resizeBuf2.virt.extP[0], m_resizeBuf2.size.extS[0]);
2567                         //m_savePostView("/data/res00_01",
2568                         //   (uint8_t*)m_resizeBuf2.virt.extP[1], m_resizeBuf2.size.extS[1]);
2569                    }*/
2570                    m_savecnt ++;
2571                    ALOGV("DEBUG(%s): svc addr[0] %x addr[1] %x", __FUNCTION__,
2572                        (unsigned int)selfRecordParms->svcBuffers[selfRecordParms->m_svcBufIndex].virt.extP[0],
2573                        (unsigned int)selfRecordParms->svcBuffers[selfRecordParms->m_svcBufIndex].virt.extP[1]);
2574                    memcpy(selfRecordParms->svcBuffers[selfRecordParms->m_svcBufIndex].virt.extP[0],
2575                        m_resizeBuf2.virt.extP[0], videoW * videoH * 4);
2576                    //memcpy(selfRecordParms->svcBuffers[selfRecordParms->m_svcBufIndex].virt.extP[0]+942080,
2577                    //    m_resizeBuf2.virt.extP[1], m_resizeBuf2.size.extS[1]);
2578
2579                    //memcpy(selfRecordParms->svcBuffers[selfRecordParms->m_svcBufIndex].virt.extP[1],
2580                    //    m_resizeBuf2.virt.extP[1], m_resizeBuf2.size.extS[1]);
2581                    //memset(selfRecordParms->svcBuffers[selfRecordParms->m_svcBufIndex].virt.extP[0],
2582                    //   128, m_resizeBuf2.size.extS[0]);
2583                    //memset(selfRecordParms->svcBuffers[selfRecordParms->m_svcBufIndex].virt.extP[0]+942080,
2584                    //   0, m_resizeBuf2.size.extS[1]);
2585                }
2586                else {
2587                    ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
2588                }
2589
2590/*              res = selfRecordParms->streamOps->enqueue_buffer(selfRecordParms->streamOps,
2591                        m_requestManager->GetTimestamp(m_ispProcessingFrameCnt),
2592                        &(selfRecordParms->svcBufHandle[selfRecordParms->m_svcBufIndex]));*/
2593                res = selfRecordParms->streamOps->enqueue_buffer(selfRecordParms->streamOps,
2594                       systemTime(),
2595                        &(selfRecordParms->svcBufHandle[selfRecordParms->m_svcBufIndex]));
2596                ALOGV("DEBUG(%s): stream(%d) record enqueue_buffer to svc done res(%d)", __FUNCTION__,
2597                    selfThread->m_index, res);
2598                if (res == 0) {
2599                    selfRecordParms->svcBufStatus[selfRecordParms->m_svcBufIndex] = ON_SERVICE;
2600                    selfRecordParms->numBufsInHal--;
2601                }
2602                selfRecordParms->m_svcBufIndex++;
2603                if (selfRecordParms->m_svcBufIndex >= selfRecordParms->numSvcBuffers)
2604                    selfRecordParms->m_svcBufIndex = 0;
2605                m_requestManager->NotifyStreamOutput(m_ispProcessingFrameCnt, 2);
2606
2607            }
2608
2609            if (m_previewOutput) {
2610                res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
2611                        m_requestManager->GetTimestamp(m_ispProcessingFrameCnt), &(selfStreamParms->svcBufHandle[index]));
2612                ALOGV("DEBUG(%s): stream(%d) enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
2613            }
2614            else {
2615                res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
2616                        &(selfStreamParms->svcBufHandle[index]));
2617                ALOGV("DEBUG(%s): stream(%d) cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
2618            }
2619            if (res == 0) {
2620                selfStreamParms->svcBufStatus[index] = ON_SERVICE;
2621            }
2622            else {
2623                selfStreamParms->svcBufStatus[index] = ON_HAL;
2624            }
2625            m_requestManager->NotifyStreamOutput(m_ispProcessingFrameCnt, selfThread->m_index);
2626        }
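        // Still-capture path: the SCC buffer (YUYV at the SCC output size) is scaled and
        // converted to NV16 at the requested picture size through m_resizeBuf with
        // m_exynosPictureCSC, JPEG-encoded by yuv2Jpeg() into a temporary ion buffer, and the
        // result is copied into a free service buffer and enqueued with the request timestamp.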
2627        else if (selfStreamParms->streamType==1) {
2628            ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
2629                selfThread->m_index, selfStreamParms->streamType);
2630            index = cam_int_dqbuf(&(selfStreamParms->node));
2631            ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
2632                selfThread->m_index, selfStreamParms->streamType, index);
2633
2634            m_jpegEncodingFrameCnt = m_ispProcessingFrameCnt;
2635
2636            bool ret = false;
2637            int pictureW, pictureH, pictureFramesize = 0;
2638            int pictureFormat;
2639            int cropX, cropY, cropW, cropH = 0;
2640
2641
2642            ExynosBuffer jpegBuf;
2643
2644            ExynosRect   m_orgPictureRect;
2645
2646            m_orgPictureRect.w = selfStreamParms->outputWidth;
2647            m_orgPictureRect.h = selfStreamParms->outputHeight;
2648
2649            ExynosBuffer* m_pictureBuf = &(m_camera_info.capture.buffer[index]);
2650
2651            pictureW = getSccOutputSizeX(m_cameraId);
2652            pictureH = getSccOutputSizeY(m_cameraId);
2653            pictureFormat = V4L2_PIX_FMT_YUYV;
2654            pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
2655
2656            if (m_exynosPictureCSC) {
2657                m_getRatioSize(pictureW, pictureH,
2658                               m_orgPictureRect.w, m_orgPictureRect.h,
2659                               &cropX, &cropY,
2660                               &cropW, &cropH,
2661                               0);
2662
2663                ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
2664                      __FUNCTION__, cropX, cropY, cropW, cropH);
2665
2666                csc_set_src_format(m_exynosPictureCSC,
2667                                   ALIGN(pictureW, 16), ALIGN(pictureH, 16),
2668                                   cropX, cropY, cropW, cropH,
2669                                   V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
2670                                   0);
2671
2672                csc_set_dst_format(m_exynosPictureCSC,
2673                                   m_orgPictureRect.w, m_orgPictureRect.h,
2674                                   0, 0, m_orgPictureRect.w, m_orgPictureRect.h,
2675                                   V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
2676                                   0);
2677                csc_set_src_buffer(m_exynosPictureCSC,
2678                                   (void **)&m_pictureBuf->fd.fd);
2679
2680                csc_set_dst_buffer(m_exynosPictureCSC,
2681                                   (void **)&m_resizeBuf.fd.fd);
2682                for (int i=0 ; i < 3 ; i++)
2683                    ALOGV("DEBUG(%s): m_resizeBuf.fd.extFd[%d]=%d m_resizeBuf.size.extS[%d]=%d",
2684                        __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
2685
2686                if (csc_convert(m_exynosPictureCSC) != 0)
2687                    ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
2688
2689                for (int i = 0; i < 3; i++)
2690                    ALOGV("DEBUG(%s): m_resizeBuf.fd.extFd[%d]=%d m_resizeBuf.size.extS[%d]=%d",
2691                        __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
2692            }
2693            else {
2694                ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
2695            }
2696
2697            m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_orgPictureRect.w, m_orgPictureRect.h, &m_resizeBuf);
2698
2699            for (int i = 0; i < 3; i++) {
2700                ALOGV("DEBUG(%s): m_resizeBuf.fd.extFd[%d]=%d m_resizeBuf.size.extS[%d]=%d",
2701                            __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
2702            }
2703
2704            for (int i = 1; i < 3; i++) {
2705                if (m_resizeBuf.size.extS[i] != 0)
2706                    m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
2707
2708                ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
2709            }
2710
2711
2712            ExynosRect jpegRect;
2713            bool found = false;
2714            jpegRect.w = m_orgPictureRect.w;
2715            jpegRect.h = m_orgPictureRect.h;
2716            jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
2717
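            // Stage the encoded JPEG in a single-plane, fixed 5 MB ION allocation.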
2718            jpegBuf.size.extS[0] = 5*1024*1024;
2719            jpegBuf.size.extS[1] = 0;
2720            jpegBuf.size.extS[2] = 0;
2721
2722            allocCameraMemory(currentNode->ionClient, &jpegBuf, 1);
2723
2724            ALOGV("DEBUG(%s): jpegBuf.size.s = %d , jpegBuf.virt.p = %x", __FUNCTION__,
2725                jpegBuf.size.s, (unsigned int)jpegBuf.virt.p);
2726
2727
2728            if (yuv2Jpeg(&m_resizeBuf, &jpegBuf, &jpegRect) == false)
2729                ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
2730            cam_int_qbuf(&(selfStreamParms->node), index);
2731            ALOGV("DEBUG(%s): stream(%d) type(%d) QBUF DONE ",__FUNCTION__,
2732                selfThread->m_index, selfStreamParms->streamType);
2733
2734            for (int i = 0; i < selfStreamParms->numSvcBuffers ; i++) {
2735                if (selfStreamParms->svcBufStatus[m_svcBufIndex] == ON_HAL) {
2736                    found = true;
2737                    break;
2738                }
2739                m_svcBufIndex++;
2740                if (m_svcBufIndex >= selfStreamParms->numSvcBuffers) m_svcBufIndex = 0;
2741            }
2742            if (!found) {
2743                ALOGE("ERR(%s): NO free SVC buffer for JPEG", __FUNCTION__);
2744            }
2745            else {
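                // Copy the staging buffer into the service buffer. The whole 5 MB is
                // copied even though the encoded length is reported in jpegBuf.size.s;
                // this assumes the service buffer is at least as large.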
2746                memcpy(selfStreamParms->svcBuffers[m_svcBufIndex].virt.extP[0], jpegBuf.virt.extP[0], 5*1024*1024);
2747
2748                res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
2749                        m_requestManager->GetTimestamp(m_jpegEncodingFrameCnt), &(selfStreamParms->svcBufHandle[m_svcBufIndex]));
2750
2751                freeCameraMemory(&jpegBuf, 1);
2752                ALOGV("DEBUG(%s): stream(%d) enqueue_buffer index(%d) to svc done res(%d)",
2753                        __FUNCTION__, selfThread->m_index, m_svcBufIndex, res);
2754                if (res == 0) {
2755                    selfStreamParms->svcBufStatus[m_svcBufIndex] = ON_SERVICE;
2756                }
2757                else {
2758                    selfStreamParms->svcBufStatus[m_svcBufIndex] = ON_HAL;
2759                }
2760                m_requestManager->NotifyStreamOutput(m_jpegEncodingFrameCnt, selfThread->m_index);
2761            }
2762
2763        }
2764        if (selfStreamParms->streamType==0 && m_recordOutput && m_recordingEnabled) {
2765            do {
2766                ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , selfRecordParms->numBufsInHal);
2767                if (selfRecordParms->numBufsInHal >= 1)
2768                {
2769                    ALOGV("DEBUG(%s): breaking", __FUNCTION__);
2770                    break;
2771                }
2772                res = selfRecordParms->streamOps->dequeue_buffer(selfRecordParms->streamOps, &buf);
2773                if (res != NO_ERROR || buf == NULL) {
2774                    ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
2775                    break;
2776                }
2777                selfRecordParms->numBufsInHal++;
2778                ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
2779                   selfRecordParms->numBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2780                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
2781
2782                bool found = false;
2783                int checkingIndex = 0;
2784                for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
2785                    if (priv_handle->fd == selfRecordParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
2786                        found = true;
2787                        break;
2788                    }
2789                }
2790                ALOGV("DEBUG(%s): recording dequeueed_buffer found index(%d)", __FUNCTION__, found);
2791                if (!found) break;
2792                index = checkingIndex;
2793                if (selfRecordParms->svcBufStatus[index] == ON_SERVICE) {
2794                    selfRecordParms->svcBufStatus[index] = ON_HAL;
2795                }
2796                else {
2797                    ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
2798                        index,  selfRecordParms->svcBufStatus[index]);
2799                }
2800            } while (0);
2801        }
2802        while(1) {
2803            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
2804            if (res != NO_ERROR || buf == NULL) {
2805                ALOGV("DEBUG(%s): stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
2806                break;
2807            }
2808
2809            ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
2810               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2811            const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
2812
2813            bool found = false;
2814            int checkingIndex = 0;
2815            for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
2816                if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
2817                    found = true;
2818                    break;
2819                }
2820            }
2821            ALOGV("DEBUG(%s): post_dequeue_buffer found(%d)", __FUNCTION__, found);
2822            if (!found) break;
2823            ALOGV("DEBUG(%s): preparing to qbuf [%d]", __FUNCTION__, checkingIndex);
2824            index = checkingIndex;
2825            if (index < selfStreamParms->numHwBuffers) {
2826                uint32_t    plane_index = 0;
2827                ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[index]);
2828                struct v4l2_buffer v4l2_buf;
2829                struct v4l2_plane  planes[VIDEO_MAX_PLANES];
2830
2831                v4l2_buf.m.planes   = planes;
2832                v4l2_buf.type       = currentNode->type;
2833                v4l2_buf.memory     = currentNode->memory;
2834                v4l2_buf.index      = index;
2835                v4l2_buf.length     = currentNode->planes;
2836
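                // Per-plane dma-buf fds come from the gralloc private handle; u_fd and
                // v_fd are assigned to planes 2 and 1 respectively (plane order assumed
                // to match what the driver expects for this format).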
2837                v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2838                v4l2_buf.m.planes[2].m.fd = priv_handle->u_fd;
2839                v4l2_buf.m.planes[1].m.fd = priv_handle->v_fd;
2840                for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
2841                    v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
2842                    ALOGV("DEBUG(%s): plane(%d): fd(%d)  length(%d)",
2843                         __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
2844                         v4l2_buf.m.planes[plane_index].length);
2845                }
2846
2847                if (selfStreamParms->streamType == 0) {
2848                    if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
2849                        ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail",
2850                            __FUNCTION__, selfThread->m_index);
2851                        return;
2852                    }
2853                    selfStreamParms->svcBufStatus[index] = ON_DRIVER;
2854                    ALOGV("DEBUG(%s): stream id(%d) type0 QBUF done index(%d)",
2855                        __FUNCTION__, selfThread->m_index, index);
2856                }
2857                else if (selfStreamParms->streamType == 1) {
2858                    selfStreamParms->svcBufStatus[index]  = ON_HAL;
2859                    ALOGV("DEBUG(%s): stream id(%d) type1 DQBUF done index(%d)",
2860                        __FUNCTION__, selfThread->m_index, index);
2861                }
2862            }
2863        }
2864        ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING DONE",
2865            __FUNCTION__,selfThread->m_index);
2866    }
2867    return;
2868}
2869
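// Encode the YUV image described by rect (held in yuvBuf) into jpegBuf using
// ExynosJpegEncoderForCamera at quality 100 with V4L2_PIX_FMT_JPEG_422 output.
// On success the encoded length is stored in jpegBuf->size.s and true is returned.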
2870bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
2871                            ExynosBuffer *jpegBuf,
2872                            ExynosRect *rect)
2873{
2874    unsigned char *addr;
2875
2876    ExynosJpegEncoderForCamera jpegEnc;
2877    bool ret = false;
2878    int res = 0;
2879
2880    unsigned int *yuvSize = yuvBuf->size.extS;
2881
2882    if (jpegEnc.create()) {
2883        ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
2884        goto jpeg_encode_done;
2885    }
2886
2887    if (jpegEnc.setQuality(100)) {
2888        ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
2889        goto jpeg_encode_done;
2890    }
2891
2892    if (jpegEnc.setSize(rect->w, rect->h)) {
2893        ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
2894        goto jpeg_encode_done;
2895    }
2896    ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
2897
2898    if (jpegEnc.setColorFormat(rect->colorFormat)) {
2899        ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
2900        goto jpeg_encode_done;
2901    }
2902    ALOGV("%s : color = %d\n", __FUNCTION__, &(rect->colorFormat));
2903
2904    if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
2905        ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
2906        goto jpeg_encode_done;
2907    }
2908#if 0
2909    if (m_curCameraInfo->thumbnailW != 0 && m_curCameraInfo->thumbnailH != 0) {
2910        int thumbW = 0, thumbH = 0;
2911        mExifInfo.enableThumb = true;
2912        if (rect->w < 320 || rect->h < 240) {
2913            thumbW = 160;
2914            thumbH = 120;
2915        } else {
2916            thumbW = m_curCameraInfo->thumbnailW;
2917            thumbH = m_curCameraInfo->thumbnailH;
2918        }
2919        if (jpegEnc.setThumbnailSize(thumbW, thumbH)) {
2920            LOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, thumbW, thumbH);
2921            goto jpeg_encode_done;
2922        }
2923
2924        if (0 < m_jpegThumbnailQuality && m_jpegThumbnailQuality <= 100) {
2925            if (jpegEnc.setThumbnailQuality(m_jpegThumbnailQuality)) {
2926                LOGE("ERR(%s):jpegEnc.setThumbnailQuality(%d) fail", __FUNCTION__, m_jpegThumbnailQuality);
2927                goto jpeg_encode_done;
2928            }
2929        }
2930
2931        m_setExifChangedAttribute(&mExifInfo, rect);
2932    } else
2933#endif
2934    {
2935        mExifInfo.enableThumb = false;
2936    }
2937    ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
2938    /*for (int i=0 ; i < 3 ; i++)
2939            ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() virt.extP[%d]=%x extS[%d]=%d",
2940                __FUNCTION__, i, yuvBuf->fd.extFd[i], i, yuvBuf->size.extS[i]);*/
2941    if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), (int *)yuvSize)) {
2942        ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
2943        goto jpeg_encode_done;
2944    }
2945
2946    if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
2947        ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
2948        goto jpeg_encode_done;
2949    }
2950    /*for (int i=0 ; i < 3 ; i++)
2951        ALOGV("DEBUG(%s): jpegBuf->virt.extP[%d]=%x   jpegBuf->size.extS[%d]=%d",
2952                __FUNCTION__, i, jpegBuf->fd.extFd[i], i, jpegBuf->size.extS[i]);*/
2953    memset(jpegBuf->virt.p, 0, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2]);
2954
2955    if (jpegEnc.updateConfig()) {
2956        ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
2957        goto jpeg_encode_done;
2958    }
2959
2960    if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, NULL)) != 0) {
2961        ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
2962        goto jpeg_encode_done;
2963    }
2964
2965    ret = true;
2966
2967jpeg_encode_done:
2968
2969    if (jpegEnc.flagCreate() == true)
2970        jpegEnc.destroy();
2971
2972    return ret;
2973}
2974
2975
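// Worker-thread teardown: each thread's release() posts SIGNAL_THREAD_RELEASE to its
// thread loop; the destructors themselves only log.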
2976ExynosCameraHWInterface2::MainThread::~MainThread()
2977{
2978    ALOGD("(%s):", __FUNCTION__);
2979}
2980
2981void ExynosCameraHWInterface2::MainThread::release()
2982{
2983    ALOGD("(%s):", __func__);
2984    SetSignal(SIGNAL_THREAD_RELEASE);
2985}
2986
2987ExynosCameraHWInterface2::SensorThread::~SensorThread()
2988{
2989    ALOGD("(%s):", __FUNCTION__);
2990}
2991
2992void ExynosCameraHWInterface2::SensorThread::release()
2993{
2994    ALOGD("(%s):", __func__);
2995    SetSignal(SIGNAL_THREAD_RELEASE);
2996}
2997
2998ExynosCameraHWInterface2::IspThread::~IspThread()
2999{
3000    ALOGD("(%s):", __FUNCTION__);
3001}
3002
3003void ExynosCameraHWInterface2::IspThread::release()
3004{
3005    ALOGD("(%s):", __func__);
3006    SetSignal(SIGNAL_THREAD_RELEASE);
3007}
3008
3009ExynosCameraHWInterface2::StreamThread::~StreamThread()
3010{
3011    ALOGD("(%s):", __FUNCTION__);
3012}
3013
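// Stream (re)configuration: setParameter() stashes the new parameters and signals the
// thread; applyChange() copies them into m_parameters on the thread side. The usleep()
// below is a temporary stand-in for proper synchronization (see the TODO).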
3014void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
3015{
3016    ALOGV("DEBUG(%s):", __FUNCTION__);
3017
3018    m_tempParameters = new_parameters;
3019
3020    SetSignal(SIGNAL_STREAM_CHANGE_PARAMETER);
3021
3022    // TODO : return synchronously (after setting parameters asynchronously)
3023    usleep(2000);
3024}
3025
3026void ExynosCameraHWInterface2::StreamThread::applyChange()
3027{
3028    memcpy(&m_parameters, m_tempParameters, sizeof(stream_parameters_t));
3029
3030    ALOGV("DEBUG(%s):  Applying Stream paremeters  width(%d), height(%d)",
3031            __FUNCTION__, m_parameters.outputWidth, m_parameters.outputHeight);
3032}
3033
3034void ExynosCameraHWInterface2::StreamThread::release()
3035{
3036    ALOGV("(%s):", __func__);
3037    SetSignal(SIGNAL_THREAD_RELEASE);
3038}
3039
3040int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
3041{
3042    int index;
3043    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
3044        if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
3045            return index;
3046    }
3047    return -1;
3048}
3049
3050void ExynosCameraHWInterface2::StreamThread::setRecordingParameter(record_parameters_t * recordParm)
3051{
3052    memcpy(&m_recordParameters, recordParm, sizeof(record_parameters_t));
3053}
3054
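// ION-backed buffer helpers: client lifetime management plus per-plane allocation,
// mapping, freeing, and initialization of ExynosBuffer structures.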
3055int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
3056{
3057    if (ionClient == 0) {
3058        ionClient = ion_client_create();
3059        if (ionClient < 0) {
3060            ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
3061            return 0;
3062        }
3063    }
3064
3065    return ionClient;
3066}
3067
3068int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
3069{
3070    if (ionClient != 0) {
3071        if (ionClient > 0) {
3072            ion_client_destroy(ionClient);
3073        }
3074        ionClient = 0;
3075    }
3076
3077    return ionClient;
3078}
3079
3080int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
3081{
3082    int ret = 0;
3083    int i = 0;
3084
3085    if (ionClient == 0) {
3086        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
3087        return -1;
3088    }
3089
3090    for (i = 0; i < iMemoryNum; i++) {
3091        if (buf->size.extS[i] == 0) {
3092            break;
3093        }
3094
3095        buf->fd.extFd[i] = ion_alloc(ionClient, \
3096                                      buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK,0);
3097        if ((buf->fd.extFd[i] == -1) || (buf->fd.extFd[i] == 0)) {
3098            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
3099            buf->fd.extFd[i] = -1;
3100            freeCameraMemory(buf, iMemoryNum);
3101            return -1;
3102        }
3103
3104        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
3105                                        buf->size.extS[i], 0);
3106        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
3107            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
3108            buf->virt.extP[i] = (char *)MAP_FAILED;
3109            freeCameraMemory(buf, iMemoryNum);
3110            return -1;
3111        }
3112        ALOGV("allocCameraMem : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
3113    }
3114
3115    return ret;
3116}
3117
3118void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
3119{
3120
3121    int i = 0;
3122
3123    for (i = 0; i < iMemoryNum; i++) {
3124        if (buf->fd.extFd[i] != -1) {
3125            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
3126                ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
3127            }
3128            ion_free(buf->fd.extFd[i]);
3129        }
3130        buf->fd.extFd[i] = -1;
3131        buf->virt.extP[i] = (char *)MAP_FAILED;
3132        buf->size.extS[i] = 0;
3133    }
3134}
3135
3136void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
3137{
3138    int i = 0;
3139    for (i = 0; i < iMemoryNum; i++) {
3140        buf->virt.extP[i] = (char *)MAP_FAILED;
3141        buf->fd.extFd[i] = -1;
3142        buf->size.extS[i] = 0;
3143    }
3144}
3145
3146
3147
3148
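// camera2 HAL module glue: a single camera2_device_t instance is kept in
// g_cam2_device, and the static HAL2_device_* wrappers forward each call to the
// ExynosCameraHWInterface2 object stored in its priv field.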
3149static camera2_device_t *g_cam2_device = NULL;
3150
3151static int HAL2_camera_device_close(struct hw_device_t* device)
3152{
3153    ALOGD("%s: ENTER", __FUNCTION__);
3154    if (device) {
3155
3156        camera2_device_t *cam_device = (camera2_device_t *)device;
3157        ALOGD("cam_device(0x%08x):", (unsigned int)cam_device);
3158        ALOGD("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
3159        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
3160        g_cam2_device = NULL;
3161        free(cam_device);
3162    }
3163    ALOGD("%s: EXIT", __FUNCTION__);
3164    return 0;
3165}
3166
3167static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
3168{
3169    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
3170}
3171
3172static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
3173            const camera2_request_queue_src_ops_t *request_src_ops)
3174{
3175    ALOGV("DEBUG(%s):", __FUNCTION__);
3176    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
3177}
3178
3179static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
3180{
3181    ALOGV("DEBUG(%s):", __FUNCTION__);
3182    return obj(dev)->notifyRequestQueueNotEmpty();
3183}
3184
3185static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
3186            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
3187{
3188    ALOGV("DEBUG(%s):", __FUNCTION__);
3189    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
3190}
3191
3192static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
3193{
3194    ALOGV("DEBUG(%s):", __FUNCTION__);
3195    return obj(dev)->getInProgressCount();
3196}
3197
3198static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
3199{
3200    ALOGV("DEBUG(%s):", __FUNCTION__);
3201    return obj(dev)->flushCapturesInProgress();
3202}
3203
3204static int HAL2_device_construct_default_request(const struct camera2_device *dev,
3205            int request_template, camera_metadata_t **request)
3206{
3207    ALOGV("DEBUG(%s):", __FUNCTION__);
3208    return obj(dev)->constructDefaultRequest(request_template, request);
3209}
3210
3211static int HAL2_device_allocate_stream(
3212            const struct camera2_device *dev,
3213            // inputs
3214            uint32_t width,
3215            uint32_t height,
3216            int      format,
3217            const camera2_stream_ops_t *stream_ops,
3218            // outputs
3219            uint32_t *stream_id,
3220            uint32_t *format_actual,
3221            uint32_t *usage,
3222            uint32_t *max_buffers)
3223{
3224    ALOGV("(%s): ", __FUNCTION__);
3225    return obj(dev)->allocateStream(width, height, format, stream_ops,
3226                                    stream_id, format_actual, usage, max_buffers);
3227}
3228
3229
3230static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
3231            uint32_t stream_id,
3232            int num_buffers,
3233            buffer_handle_t *buffers)
3234{
3235    ALOGV("DEBUG(%s):", __FUNCTION__);
3236    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
3237}
3238
3239static int HAL2_device_release_stream(
3240        const struct camera2_device *dev,
3241            uint32_t stream_id)
3242{
3243    ALOGD("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
3244    return obj(dev)->releaseStream(stream_id);
3245}
3246
3247static int HAL2_device_allocate_reprocess_stream(
3248           const struct camera2_device *dev,
3249            uint32_t width,
3250            uint32_t height,
3251            uint32_t format,
3252            const camera2_stream_in_ops_t *reprocess_stream_ops,
3253            // outputs
3254            uint32_t *stream_id,
3255            uint32_t *consumer_usage,
3256            uint32_t *max_buffers)
3257{
3258    ALOGV("DEBUG(%s):", __FUNCTION__);
3259    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
3260                                    stream_id, consumer_usage, max_buffers);
3261}
3262
3263static int HAL2_device_release_reprocess_stream(
3264        const struct camera2_device *dev,
3265            uint32_t stream_id)
3266{
3267    ALOGV("DEBUG(%s):", __FUNCTION__);
3268    return obj(dev)->releaseReprocessStream(stream_id);
3269}
3270
3271static int HAL2_device_trigger_action(const struct camera2_device *dev,
3272           uint32_t trigger_id,
3273            int ext1,
3274            int ext2)
3275{
3276    ALOGV("DEBUG(%s):", __FUNCTION__);
3277    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
3278}
3279
3280static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
3281            camera2_notify_callback notify_cb,
3282            void *user)
3283{
3284    ALOGV("DEBUG(%s):", __FUNCTION__);
3285    return obj(dev)->setNotifyCallback(notify_cb, user);
3286}
3287
3288static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
3289            vendor_tag_query_ops_t **ops)
3290{
3291    ALOGV("DEBUG(%s):", __FUNCTION__);
3292    return obj(dev)->getMetadataVendorTagOps(ops);
3293}
3294
3295static int HAL2_device_dump(const struct camera2_device *dev, int fd)
3296{
3297    ALOGV("DEBUG(%s):", __FUNCTION__);
3298    return obj(dev)->dump(fd);
3299}
3300
3301
3302
3303
3304
3305static int HAL2_getNumberOfCameras()
3306{
3307    ALOGV("(%s): returning 2", __FUNCTION__);
3308    return 2;
3309}
3310
3311
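// Static camera characteristics are built once per camera ID (first pass sizes the
// metadata, second pass fills it) and cached in mCameraInfo for later calls.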
3312static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
3313{
3314    ALOGD("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
3315    static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};
3316
3317    status_t res;
3318
3319    if (cameraId == 0)
3320        info->facing = CAMERA_FACING_BACK;
3321    else
3322        info->facing = CAMERA_FACING_FRONT;
3323    info->orientation = 0;
3324    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
3325    if (mCameraInfo[cameraId] == NULL) {
3326        res = constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
3327        if (res != OK) {
3328            ALOGE("%s: Unable to allocate static info: %s (%d)",
3329                    __FUNCTION__, strerror(-res), res);
3330            return res;
3331        }
3332        res = constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
3333        if (res != OK) {
3334            ALOGE("%s: Unable to fill in static info: %s (%d)",
3335                    __FUNCTION__, strerror(-res), res);
3336            return res;
3337        }
3338    }
3339    info->static_camera_characteristics = mCameraInfo[cameraId];
3340    return NO_ERROR;
3341}
3342
3343#define SET_METHOD(m) m : HAL2_device_##m
3344
3345static camera2_device_ops_t camera2_device_ops = {
3346        SET_METHOD(set_request_queue_src_ops),
3347        SET_METHOD(notify_request_queue_not_empty),
3348        SET_METHOD(set_frame_queue_dst_ops),
3349        SET_METHOD(get_in_progress_count),
3350        SET_METHOD(flush_captures_in_progress),
3351        SET_METHOD(construct_default_request),
3352        SET_METHOD(allocate_stream),
3353        SET_METHOD(register_stream_buffers),
3354        SET_METHOD(release_stream),
3355        SET_METHOD(allocate_reprocess_stream),
3356        SET_METHOD(release_reprocess_stream),
3357        SET_METHOD(trigger_action),
3358        SET_METHOD(set_notify_callback),
3359        SET_METHOD(get_metadata_vendor_tag_ops),
3360        SET_METHOD(dump),
3361};
3362
3363#undef SET_METHOD
3364
3365
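// open() allows only one camera instance at a time: if a different camera ID is
// already open, the call blocks until that device is closed before allocating a
// new camera2_device_t.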
3366static int HAL2_camera_device_open(const struct hw_module_t* module,
3367                                  const char *id,
3368                                  struct hw_device_t** device)
3369{
3370
3371
3372    int cameraId = atoi(id);
3373
3374    ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
3375    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
3376        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
3377        return -EINVAL;
3378    }
3379
3380    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
3381    if (g_cam2_device) {
3382        if (obj(g_cam2_device)->getCameraId() == cameraId) {
3383            ALOGV("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
3384            goto done;
3385        } else {
3386
3387            while (g_cam2_device)
3388                usleep(10000);
3389            /*ALOGE("ERR(%s):Cannot open camera %d. camera %d is already running!",
3390                    __FUNCTION__, cameraId, obj(g_cam2_device)->getCameraId());
3391            return -ENOSYS;*/
3392        }
3393    }
3394
3395    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
3396    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
3397
3398    if (!g_cam2_device)
3399        return -ENOMEM;
3400
3401    g_cam2_device->common.tag     = HARDWARE_DEVICE_TAG;
3402    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
3403    g_cam2_device->common.module  = const_cast<hw_module_t *>(module);
3404    g_cam2_device->common.close   = HAL2_camera_device_close;
3405
3406    g_cam2_device->ops = &camera2_device_ops;
3407
3408    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);
3409
3410    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device);
3411
3412done:
3413    *device = (hw_device_t *)g_cam2_device;
3414    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
3415
3416    return 0;
3417}
3418
3419
3420static hw_module_methods_t camera_module_methods = {
3421            open : HAL2_camera_device_open
3422};
3423
3424extern "C" {
3425    struct camera_module HAL_MODULE_INFO_SYM = {
3426      common : {
3427          tag                : HARDWARE_MODULE_TAG,
3428          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
3429          hal_api_version    : HARDWARE_HAL_API_VERSION,
3430          id                 : CAMERA_HARDWARE_MODULE_ID,
3431          name               : "Exynos Camera HAL2",
3432          author             : "Samsung Corporation",
3433          methods            : &camera_module_methods,
3434          dso:                NULL,
3435          reserved:           {0},
3436      },
3437      get_number_of_cameras : HAL2_getNumberOfCameras,
3438      get_camera_info       : HAL2_getCameraInfo
3439    };
3440}
3441
3442}; // namespace android
3443