ItsService.java revision f8f681e8367f4ebf5e8b1124d6631388e4b5dea2
/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.android.camera2.its;

import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureFailure;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.Rational;
import android.media.Image;
import android.media.ImageReader;
import android.net.Uri;
import android.os.ConditionVariable;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.IBinder;
import android.os.Message;
import android.util.Log;
import android.view.Surface;

import com.android.ex.camera2.blocking.BlockingCameraManager;
import com.android.ex.camera2.blocking.BlockingCameraManager.BlockingOpenException;

import org.json.JSONObject;

import java.io.File;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

public class ItsService extends Service {
    public static final String TAG = ItsService.class.getSimpleName();
    public static final String PYTAG = "CAMERA-ITS-PY";
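    // Messages logged with PYTAG form a simple "### ..." protocol intended for the
    // host-side (Python) ITS scripts: "### RECV" when an intent arrives, "### DONE" or
    // "### FAIL" when a command finishes, "### SIZE <w> <h>", "### CAPT <i> of <n>",
    // "### FILE <path>", and "### 3A-E" / "### 3A-F" / "### 3A-W" for converged 3A values.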

    // Supported intents
    public static final String ACTION_CAPTURE = "com.android.camera2.its.CAPTURE";
    public static final String ACTION_3A = "com.android.camera2.its.3A";
    public static final String ACTION_GETPROPS = "com.android.camera2.its.GETPROPS";
    private static final int MESSAGE_CAPTURE = 1;
    private static final int MESSAGE_3A = 2;
    private static final int MESSAGE_GETPROPS = 3;

    // Timeouts, in seconds.
    public static final int TIMEOUT_CAPTURE = 10;
    public static final int TIMEOUT_3A = 10;

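    // Passed as maxImages to ImageReader.newInstance(); bounds how many frames may be
    // acquired from the reader but not yet closed at any one time.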
    private static final int MAX_CONCURRENT_READER_BUFFERS = 8;

    public static final String REGION_KEY = "regions";
    public static final String REGION_AE_KEY = "ae";
    public static final String REGION_AWB_KEY = "awb";
    public static final String REGION_AF_KEY = "af";
    public static final String TRIGGER_KEY = "triggers";
    public static final String TRIGGER_AE_KEY = "ae";
    public static final String TRIGGER_AF_KEY = "af";

    private CameraManager mCameraManager = null;
    private BlockingCameraManager mBlockingCameraManager = null;
    private CameraDevice mCamera = null;
    private ImageReader mCaptureReader = null;
    private CameraCharacteristics mCameraCharacteristics = null;

    private HandlerThread mCommandThread;
    private Handler mCommandHandler;
    private HandlerThread mSaveThread;
    private Handler mSaveHandler;
    private HandlerThread mResultThread;
    private Handler mResultHandler;

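    // State shared with the capture-result callback while do3A() is running:
    // mInterlock3A keeps at most one 3A request outstanding at a time, mIssuedRequest3A
    // marks that the in-flight result belongs to the 3A routine, and the mConverged*
    // flags mirror the AE/AF/AWB states reported in the most recent capture result.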
    private ConditionVariable mInterlock3A = new ConditionVariable(true);
    private volatile boolean mIssuedRequest3A = false;
    private volatile boolean mConvergedAE = false;
    private volatile boolean mConvergedAF = false;
    private volatile boolean mConvergedAWB = false;

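    // Tracks how many capture callbacks are still expected during a doCapture()
    // sequence; doCapture() blocks on it until all images and results have arrived.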
    private CountDownLatch mCaptureCallbackLatch;

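    // Invoked on the save thread for each image produced by the capture ImageReader.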
    public interface CaptureListener {
        void onCaptureAvailable(Image capture);
    }

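    // Convenience base class so the capture-result callback below can be referred to by
    // a single named type; it adds nothing to the framework's CameraDevice.CaptureListener.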
    public abstract class CaptureResultListener extends CameraDevice.CaptureListener {}

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onCreate() {

        try {
            // Get handle to camera manager.
            mCameraManager = (CameraManager) this.getSystemService(Context.CAMERA_SERVICE);
            if (mCameraManager == null) {
                throw new ItsException("Failed to connect to camera manager");
            }
            mBlockingCameraManager = new BlockingCameraManager(mCameraManager);

            // Open the camera device, and get its properties.
            String[] devices;
            try {
                devices = mCameraManager.getCameraIdList();
                if (devices == null || devices.length == 0) {
                    throw new ItsException("No camera devices");
                }
            } catch (CameraAccessException e) {
                throw new ItsException("Failed to get device ID list", e);
            }

            HandlerThread openThread = new HandlerThread("OpenThread");
            try {
                openThread.start();
                Handler openHandler = new Handler(openThread.getLooper());

                // TODO: Add support for specifying which device to open.
                mCamera = mBlockingCameraManager.openCamera(devices[0], /*listener*/null,
                        openHandler);
                mCameraCharacteristics = mCameraManager.getCameraCharacteristics(devices[0]);
            } catch (CameraAccessException e) {
                throw new ItsException("Failed to open camera", e);
            } catch (BlockingOpenException e) {
                throw new ItsException("Failed to open camera (after blocking)", e);
            } finally {
                /**
                 * OK to shut down thread immediately after #openCamera since there is no listener.
                 * If listener ever becomes non-null then handler's thread must be valid for
                 * the full lifetime of the listener.
                 */
                openThread.quitSafely();
            }

            // Create a thread to receive images and save them.
            mSaveThread = new HandlerThread("SaveThread");
            mSaveThread.start();
            mSaveHandler = new Handler(mSaveThread.getLooper());

            // Create a thread to receive capture results and process them
            mResultThread = new HandlerThread("ResultThread");
            mResultThread.start();
            mResultHandler = new Handler(mResultThread.getLooper());

            // Create a thread to process commands.
            mCommandThread = new HandlerThread("CaptureThread");
            mCommandThread.start();
            mCommandHandler = new Handler(mCommandThread.getLooper(), new Handler.Callback() {
                @Override
                public boolean handleMessage(Message msg) {
                    try {
                        switch (msg.what) {
                            case MESSAGE_CAPTURE:
                                doCapture((Uri) msg.obj);
                                break;
                            case MESSAGE_3A:
                                do3A((Uri) msg.obj);
                                break;
                            case MESSAGE_GETPROPS:
                                doGetProps();
                                break;
                            default:
                                throw new ItsException("Unknown message type");
                        }
                        Log.i(PYTAG, "### DONE");
                        return true;
                    }
                    catch (ItsException e) {
                        Log.e(TAG, "Script failed: ", e);
                        Log.e(PYTAG, "### FAIL");
                        return true;
                    }
                }
            });
        } catch (ItsException e) {
            Log.e(TAG, "Script failed: ", e);
            Log.e(PYTAG, "### FAIL");
        }
    }

    @Override
    public void onDestroy() {
        try {
            if (mCommandThread != null) {
                mCommandThread.quit();
                mCommandThread = null;
            }
            if (mSaveThread != null) {
                mSaveThread.quit();
                mSaveThread = null;
            }
            if (mResultThread != null) {
                mResultThread.quit();
                mResultThread = null;
            }

            try {
                if (mCamera != null) {
                    mCamera.close();
                }
            } catch (Exception e) {
                throw new ItsException("Failed to close device", e);
            }
        } catch (ItsException e) {
            Log.e(TAG, "Script failed: ", e);
            Log.e(PYTAG, "### FAIL");
        }
    }

    @Override
    public int onStartCommand(Intent intent, int flags, int startId) {
        try {
            Log.i(PYTAG, "### RECV");
            String action = intent.getAction();
            if (ACTION_CAPTURE.equals(action)) {
                Uri uri = intent.getData();
                Message m = mCommandHandler.obtainMessage(MESSAGE_CAPTURE, uri);
                mCommandHandler.sendMessage(m);
            } else if (ACTION_3A.equals(action)) {
                Uri uri = intent.getData();
                Message m = mCommandHandler.obtainMessage(MESSAGE_3A, uri);
                mCommandHandler.sendMessage(m);
            } else if (ACTION_GETPROPS.equals(action)) {
                Uri uri = intent.getData();
                Message m = mCommandHandler.obtainMessage(MESSAGE_GETPROPS, uri);
                mCommandHandler.sendMessage(m);
            } else {
                throw new ItsException("Unhandled intent: " + intent.toString());
            }
        } catch (ItsException e) {
            Log.e(TAG, "Script failed: ", e);
            Log.e(PYTAG, "### FAIL");
        }
        return START_STICKY;
    }

    public void idleCamera() throws ItsException {
        try {
            mCamera.stopRepeating();
            mCamera.waitUntilIdle();
        } catch (CameraAccessException e) {
            throw new ItsException("Error waiting for camera idle", e);
        }
    }

    private ImageReader.OnImageAvailableListener
            createAvailableListener(final CaptureListener listener) {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = null;
                try {
                    i = reader.acquireNextImage();
                    listener.onCaptureAvailable(i);
                } finally {
                    if (i != null) {
                        i.close();
                    }
                }
            }
        };
    }

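    // Returns a listener that simply acquires and closes each frame, discarding it.
    // Used during 3A, where the image data itself is not needed; the passed-in
    // CaptureListener is ignored.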
    private ImageReader.OnImageAvailableListener
            createAvailableListenerDropper(final CaptureListener listener) {
        return new ImageReader.OnImageAvailableListener() {
            @Override
            public void onImageAvailable(ImageReader reader) {
                Image i = reader.acquireNextImage();
                i.close();
            }
        };
    }

    private void doGetProps() throws ItsException {
        String fileName = ItsUtils.getMetadataFileName(0);
        File mdFile = ItsUtils.getOutputFile(ItsService.this, fileName);
        ItsUtils.storeCameraCharacteristics(mCameraCharacteristics, mdFile);
        Log.i(PYTAG,
              String.format("### FILE %s",
                            ItsUtils.getExternallyVisiblePath(ItsService.this, mdFile.toString())));
    }

    private void prepareCaptureReader(int width, int height, int format) {
        if (mCaptureReader == null
                || mCaptureReader.getWidth() != width
                || mCaptureReader.getHeight() != height
                || mCaptureReader.getImageFormat() != format) {
            if (mCaptureReader != null) {
                mCaptureReader.close();
            }
            mCaptureReader = ImageReader.newInstance(width, height, format,
                    MAX_CONCURRENT_READER_BUFFERS);
        }
    }

    private void do3A(Uri uri) throws ItsException {
        try {
            if (uri == null || !uri.toString().endsWith(".json")) {
                throw new ItsException("Invalid URI: " + uri);
            }

            idleCamera();

            // Start a 3A action, and wait for it to converge.
            // Get the converged values for each "A", and package into JSON result for caller.

            // 3A happens on full-res frames.
            android.hardware.camera2.Size[] sizes = mCameraCharacteristics.get(
                    CameraCharacteristics.SCALER_AVAILABLE_JPEG_SIZES);
            int width = sizes[0].getWidth();
            int height = sizes[0].getHeight();
            int format = ImageFormat.YUV_420_888;

            prepareCaptureReader(width, height, format);
            List<Surface> outputSurfaces = new ArrayList<Surface>(1);
            outputSurfaces.add(mCaptureReader.getSurface());
            mCamera.configureOutputs(outputSurfaces);

            // Add a listener that just recycles buffers; they aren't saved anywhere.
            ImageReader.OnImageAvailableListener readerListener =
                    createAvailableListenerDropper(mCaptureListener);
            mCaptureReader.setOnImageAvailableListener(readerListener, mSaveHandler);

            // Get the user-specified regions for AE, AWB, AF.
            // Note that the user specifies normalized [x,y,w,h], which is converted below
            // to an [x0,y0,x1,y1] region in sensor coords. The capture request region
            // also has a fifth "weight" element: [x0,y0,x1,y1,w].
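            // For example, assuming getJsonRectFromArray() scales the normalized values by
            // the frame dimensions: a request of [0.0, 0.0, 0.5, 0.5] on a 640x480 frame
            // yields the pixel rect [0, 0, 320, 240], which becomes [0, 0, 319, 239, 1] below.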
            int[] regionAE = new int[]{0,0,width-1,height-1,1};
            int[] regionAF = new int[]{0,0,width-1,height-1,1};
            int[] regionAWB = new int[]{0,0,width-1,height-1,1};
            JSONObject params = ItsUtils.loadJsonFile(uri);
            if (params.has(REGION_KEY)) {
                JSONObject regions = params.getJSONObject(REGION_KEY);
                if (regions.has(REGION_AE_KEY)) {
                    int[] r = ItsUtils.getJsonRectFromArray(
                            regions.getJSONArray(REGION_AE_KEY), true, width, height);
                    regionAE = new int[]{r[0],r[1],r[0]+r[2]-1,r[1]+r[3]-1,1};
                }
                if (regions.has(REGION_AF_KEY)) {
                    int[] r = ItsUtils.getJsonRectFromArray(
                            regions.getJSONArray(REGION_AF_KEY), true, width, height);
                    regionAF = new int[]{r[0],r[1],r[0]+r[2]-1,r[1]+r[3]-1,1};
                }
                if (regions.has(REGION_AWB_KEY)) {
                    int[] r = ItsUtils.getJsonRectFromArray(
                            regions.getJSONArray(REGION_AWB_KEY), true, width, height);
                    regionAWB = new int[]{r[0],r[1],r[0]+r[2]-1,r[1]+r[3]-1,1};
                }
            }
            Log.i(TAG, "AE region: " + Arrays.toString(regionAE));
            Log.i(TAG, "AF region: " + Arrays.toString(regionAF));
            Log.i(TAG, "AWB region: " + Arrays.toString(regionAWB));

            // By default, AE and AF both get triggered, but the user can optionally override this.
            boolean doAE = true;
            boolean doAF = true;
            if (params.has(TRIGGER_KEY)) {
                JSONObject triggers = params.getJSONObject(TRIGGER_KEY);
                if (triggers.has(TRIGGER_AE_KEY)) {
                    doAE = triggers.getBoolean(TRIGGER_AE_KEY);
                }
                if (triggers.has(TRIGGER_AF_KEY)) {
                    doAF = triggers.getBoolean(TRIGGER_AF_KEY);
                }
            }

            mInterlock3A.open();
            mIssuedRequest3A = false;
            mConvergedAE = false;
            mConvergedAWB = false;
            mConvergedAF = false;
            long tstart = System.currentTimeMillis();
            boolean triggeredAE = false;
            boolean triggeredAF = false;

            // Keep issuing capture requests until 3A has converged.
            // First do AE, then do AF and AWB together.
            while (true) {

                // Block until the next 3A frame can be issued. Only one frame is kept
                // outstanding at a time, to simplify the logic here.
                if (!mInterlock3A.block(TIMEOUT_3A * 1000) ||
                        System.currentTimeMillis() - tstart > TIMEOUT_3A * 1000) {
                    throw new ItsException("3A failed to converge (timeout)");
                }
                mInterlock3A.close();

                // If not converged yet, issue another capture request.
                if ((doAE && !mConvergedAE) || !mConvergedAWB || (doAF && !mConvergedAF)) {

                    // Baseline capture request for 3A.
                    CaptureRequest.Builder req = mCamera.createCaptureRequest(
                            CameraDevice.TEMPLATE_PREVIEW);
                    req.set(CaptureRequest.FLASH_MODE, CaptureRequest.FLASH_MODE_OFF);
                    req.set(CaptureRequest.CONTROL_MODE, CaptureRequest.CONTROL_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_CAPTURE_INTENT,
                            CaptureRequest.CONTROL_CAPTURE_INTENT_PREVIEW);
                    req.set(CaptureRequest.CONTROL_AE_MODE,
                            CaptureRequest.CONTROL_AE_MODE_ON);
                    req.set(CaptureRequest.CONTROL_AE_EXPOSURE_COMPENSATION, 0);
                    req.set(CaptureRequest.CONTROL_AE_LOCK, false);
                    req.set(CaptureRequest.CONTROL_AE_REGIONS, regionAE);
                    req.set(CaptureRequest.CONTROL_AF_MODE,
                            CaptureRequest.CONTROL_AF_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_AF_REGIONS, regionAF);
                    req.set(CaptureRequest.CONTROL_AWB_MODE,
                            CaptureRequest.CONTROL_AWB_MODE_AUTO);
                    req.set(CaptureRequest.CONTROL_AWB_LOCK, false);
                    req.set(CaptureRequest.CONTROL_AWB_REGIONS, regionAWB);

                    // Trigger AE first.
                    if (doAE && !triggeredAE) {
                        Log.i(TAG, "Triggering AE");
                        req.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
                                CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
                        triggeredAE = true;
                    }

                    // After AE has converged, trigger AF.
                    if (doAF && !triggeredAF && (!doAE || (triggeredAE && mConvergedAE))) {
                        Log.i(TAG, "Triggering AF");
                        req.set(CaptureRequest.CONTROL_AF_TRIGGER,
                                CaptureRequest.CONTROL_AF_TRIGGER_START);
                        triggeredAF = true;
                    }

                    req.addTarget(mCaptureReader.getSurface());

                    mIssuedRequest3A = true;
                    mCamera.capture(req.build(), mCaptureResultListener, mResultHandler);
                } else {
                    Log.i(TAG, "3A converged");
                    break;
                }
            }
        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        } catch (org.json.JSONException e) {
            throw new ItsException("JSON error: ", e);
        }
    }

    private void doCapture(Uri uri) throws ItsException {
        try {
            if (uri == null || !uri.toString().endsWith(".json")) {
                throw new ItsException("Invalid URI: " + uri);
            }

            idleCamera();

            // Parse the JSON to get the list of capture requests.
            List<CaptureRequest.Builder> requests = ItsUtils.loadRequestList(mCamera, uri);

            // Set the output surface and listeners.
            try {
                // Default:
                // Capture full-frame images. Use the reported JPEG size rather than the sensor
                // size since this is more likely to be the unscaled size; the crop from sensor
                // size is probably for the ISP (e.g. demosaicking) rather than the encoder.
                android.hardware.camera2.Size[] sizes = mCameraCharacteristics.get(
                        CameraCharacteristics.SCALER_AVAILABLE_JPEG_SIZES);
                int width = sizes[0].getWidth();
                int height = sizes[0].getHeight();
                int format = ImageFormat.YUV_420_888;

                JSONObject jsonOutputSpecs = ItsUtils.getOutputSpecs(uri);
                if (jsonOutputSpecs != null) {
                    // Use the user's JSON capture spec.
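                    // The keys read below are "width", "height", and "format" ("yuv" or
                    // "jpg"/"jpeg"), e.g. a spec of {"width": 640, "height": 480,
                    // "format": "yuv"}; the exact JSON wrapping is defined by
                    // ItsUtils.getOutputSpecs().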
                    int width2 = jsonOutputSpecs.optInt("width");
                    int height2 = jsonOutputSpecs.optInt("height");
                    if (width2 > 0) {
                        width = width2;
                    }
                    if (height2 > 0) {
                        height = height2;
                    }
                    String sformat = jsonOutputSpecs.optString("format");
                    if ("yuv".equals(sformat)) {
                        format = ImageFormat.YUV_420_888;
                    } else if ("jpg".equals(sformat) || "jpeg".equals(sformat)) {
                        format = ImageFormat.JPEG;
                    } else if ("".equals(sformat)) {
                        // No format specified.
                    } else {
                        throw new ItsException("Unsupported format: " + sformat);
                    }
                }

                Log.i(PYTAG, String.format("### SIZE %d %d", width, height));

                prepareCaptureReader(width, height, format);
                List<Surface> outputSurfaces = new ArrayList<Surface>(1);
                outputSurfaces.add(mCaptureReader.getSurface());
                mCamera.configureOutputs(outputSurfaces);

                ImageReader.OnImageAvailableListener readerListener =
                        createAvailableListener(mCaptureListener);
                mCaptureReader.setOnImageAvailableListener(readerListener, mSaveHandler);

                // Plan for how many callbacks need to be received throughout the duration of this
                // sequence of capture requests.
                int numCaptures = requests.size();
                mCaptureCallbackLatch = new CountDownLatch(
                        numCaptures * ItsUtils.getCallbacksPerCapture(format));

            } catch (CameraAccessException e) {
                throw new ItsException("Error configuring outputs", e);
            }

            // Initiate the captures.
            for (int i = 0; i < requests.size(); i++) {
                CaptureRequest.Builder req = requests.get(i);
                Log.i(PYTAG, String.format("### CAPT %d of %d", i+1, requests.size()));
                req.addTarget(mCaptureReader.getSurface());
                mCamera.capture(req.build(), mCaptureResultListener, mResultHandler);
            }

            // Make sure all callbacks have been hit (wait until captures are done).
            try {
                if (!mCaptureCallbackLatch.await(TIMEOUT_CAPTURE, TimeUnit.SECONDS)) {
                    throw new ItsException(
                            "Timeout hit, but all callbacks not received");
                }
            } catch (InterruptedException e) {
                throw new ItsException("Interrupted: ", e);
            }

        } catch (android.hardware.camera2.CameraAccessException e) {
            throw new ItsException("Access error: ", e);
        }
    }

    private final CaptureListener mCaptureListener = new CaptureListener() {
        @Override
        public void onCaptureAvailable(Image capture) {
            try {
                int format = capture.getFormat();
                String extFileName = null;
                if (format == ImageFormat.JPEG) {
                    String fileName = ItsUtils.getJpegFileName(capture.getTimestamp());
                    ByteBuffer buf = capture.getPlanes()[0].getBuffer();
                    extFileName = ItsUtils.writeImageToFile(ItsService.this, buf, fileName);
                } else if (format == ImageFormat.YUV_420_888) {
                    String fileName = ItsUtils.getYuvFileName(capture.getTimestamp());
                    byte[] img = ItsUtils.getDataFromImage(capture);
                    ByteBuffer buf = ByteBuffer.wrap(img);
                    extFileName = ItsUtils.writeImageToFile(ItsService.this, buf, fileName);
                } else {
                    throw new ItsException("Unsupported image format: " + format);
                }
                Log.i(PYTAG, String.format("### FILE %s", extFileName));
                mCaptureCallbackLatch.countDown();
            } catch (ItsException e) {
                Log.e(TAG, "Script error: " + e);
                Log.e(PYTAG, "### FAIL");
            }
        }
    };

    private static float r2f(Rational r) {
        return (float)r.getNumerator() / (float)r.getDenominator();
    }

    private final CaptureResultListener mCaptureResultListener = new CaptureResultListener() {
        @Override
        public void onCaptureStarted(CameraDevice camera, CaptureRequest request, long timestamp) {
        }

        @Override
        public void onCaptureCompleted(CameraDevice camera, CaptureRequest request,
                CaptureResult result) {
            try {
                // Currently result has all 0 values.
                if (request == null || result == null) {
                    throw new ItsException("Request/result is invalid");
                }

                StringBuilder logMsg = new StringBuilder();
                logMsg.append(String.format(
                        "Capt result: AE=%d, AF=%d, AWB=%d, sens=%d, exp=%.1fms, dur=%.1fms, ",
                        result.get(CaptureResult.CONTROL_AE_STATE),
                        result.get(CaptureResult.CONTROL_AF_STATE),
                        result.get(CaptureResult.CONTROL_AWB_STATE),
                        result.get(CaptureResult.SENSOR_SENSITIVITY),
                        result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue() / 1000000.0f,
                        result.get(CaptureResult.SENSOR_FRAME_DURATION).intValue() / 1000000.0f));
                if (result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null) {
                    logMsg.append(String.format(
                            "gains=[%.1f, %.1f, %.1f, %.1f], ",
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[0],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[1],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[2],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[3]));
                } else {
                    logMsg.append("gains=[], ");
                }
                if (result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
                    logMsg.append(String.format(
                            "xform=[%.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f, %.1f], ",
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[0]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[1]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[2]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[3]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[4]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[5]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[6]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[7]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[8])));
                } else {
                    logMsg.append("xform=[], ");
                }
                logMsg.append(String.format(
                        "foc=%.1f",
                        result.get(CaptureResult.LENS_FOCUS_DISTANCE)));
                Log.i(TAG, logMsg.toString());

                mConvergedAE = result.get(CaptureResult.CONTROL_AE_STATE) ==
                                          CaptureResult.CONTROL_AE_STATE_CONVERGED;
                mConvergedAF = result.get(CaptureResult.CONTROL_AF_STATE) ==
                                          CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED;
                mConvergedAWB = result.get(CaptureResult.CONTROL_AWB_STATE) ==
                                           CaptureResult.CONTROL_AWB_STATE_CONVERGED;

                if (mConvergedAE) {
                    Log.i(PYTAG, String.format(
                            "### 3A-E %d %d",
                            result.get(CaptureResult.SENSOR_SENSITIVITY).intValue(),
                            result.get(CaptureResult.SENSOR_EXPOSURE_TIME).intValue()
                            ));
                }

                if (mConvergedAF) {
                    Log.i(PYTAG, String.format(
                            "### 3A-F %f",
                            result.get(CaptureResult.LENS_FOCUS_DISTANCE)
                            ));
                }

                if (mConvergedAWB && result.get(CaptureResult.COLOR_CORRECTION_GAINS) != null
                        && result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM) != null) {
                    Log.i(PYTAG, String.format(
                            "### 3A-W %f %f %f %f %f %f %f %f %f %f %f %f %f",
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[0],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[1],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[2],
                            result.get(CaptureResult.COLOR_CORRECTION_GAINS)[3],
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[0]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[1]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[2]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[3]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[4]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[5]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[6]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[7]),
                            r2f(result.get(CaptureResult.COLOR_CORRECTION_TRANSFORM)[8])
                            ));
                }

                if (mIssuedRequest3A) {
                    mIssuedRequest3A = false;
                    mInterlock3A.open();
                } else {
                    String fileName = ItsUtils.getMetadataFileName(
                            result.get(CaptureResult.SENSOR_TIMESTAMP));
                    File mdFile = ItsUtils.getOutputFile(ItsService.this, fileName);
                    ItsUtils.storeResults(mCameraCharacteristics, request, result, mdFile);
                    mCaptureCallbackLatch.countDown();
                }
            } catch (ItsException e) {
                Log.e(TAG, "Script error: " + e);
                Log.e(PYTAG, "### FAIL");
            } catch (Exception e) {
                Log.e(TAG, "Script error: " + e);
                Log.e(PYTAG, "### FAIL");
            }
        }

        @Override
        public void onCaptureFailed(CameraDevice camera, CaptureRequest request,
                CaptureFailure failure) {
            // The latch is only created by doCapture(), so guard against it being null
            // (e.g. if a 3A request fails).
            if (mCaptureCallbackLatch != null) {
                mCaptureCallbackLatch.countDown();
            }
            Log.e(TAG, "Script error: capture failed");
            Log.e(PYTAG, "### FAIL");
        }
    };

}