1/*
2 * Copyright (C) 2010 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17//#define LOG_NDEBUG 0
18#define LOG_TAG "ACodec"
19
20#ifdef __LP64__
21#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
22#endif
23
24#include <inttypes.h>
25#include <utils/Trace.h>
26
27#include <gui/Surface.h>
28
29#include <media/stagefright/ACodec.h>
30
31#include <binder/MemoryDealer.h>
32
33#include <media/stagefright/foundation/hexdump.h>
34#include <media/stagefright/foundation/ABuffer.h>
35#include <media/stagefright/foundation/ADebug.h>
36#include <media/stagefright/foundation/AMessage.h>
37#include <media/stagefright/foundation/AUtils.h>
38
39#include <media/stagefright/BufferProducerWrapper.h>
40#include <media/stagefright/MediaCodec.h>
41#include <media/stagefright/MediaCodecList.h>
42#include <media/stagefright/MediaDefs.h>
43#include <media/stagefright/OMXClient.h>
44#include <media/stagefright/PersistentSurface.h>
45#include <media/stagefright/SurfaceUtils.h>
46#include <media/hardware/HardwareAPI.h>
47
48#include <OMX_AudioExt.h>
49#include <OMX_VideoExt.h>
50#include <OMX_Component.h>
51#include <OMX_IndexExt.h>
52#include <OMX_AsString.h>
53
54#include "include/avc_utils.h"
55#include "include/DataConverter.h"
56#include "omx/OMXUtils.h"
57
58namespace android {
59
// File-local integer constants.
enum {
    kMaxIndicesToCheck = 32, // used when enumerating supported formats and profiles
};
63
64// OMX errors are directly mapped into status_t range if
65// there is no corresponding MediaError status code.
66// Use the statusFromOMXError(int32_t omxError) function.
67//
68// Currently this is a direct map.
69// See frameworks/native/include/media/openmax/OMX_Core.h
70//
71// Vendor OMX errors     from 0x90000000 - 0x9000FFFF
72// Extension OMX errors  from 0x8F000000 - 0x90000000
73// Standard OMX errors   from 0x80001000 - 0x80001024 (0x80001024 current)
74//
75
76// returns true if err is a recognized OMX error code.
77// as OMX error is OMX_S32, this is an int32_t type
78static inline bool isOMXError(int32_t err) {
79    return (ERROR_CODEC_MIN <= err && err <= ERROR_CODEC_MAX);
80}
81
82// converts an OMX error to a status_t
83static inline status_t statusFromOMXError(int32_t omxError) {
84    switch (omxError) {
85    case OMX_ErrorInvalidComponentName:
86    case OMX_ErrorComponentNotFound:
87        return NAME_NOT_FOUND; // can trigger illegal argument error for provided names.
88    default:
89        return isOMXError(omxError) ? omxError : 0; // no translation required
90    }
91}
92
93// checks and converts status_t to a non-side-effect status_t
94static inline status_t makeNoSideEffectStatus(status_t err) {
95    switch (err) {
96    // the following errors have side effects and may come
97    // from other code modules. Remap for safety reasons.
98    case INVALID_OPERATION:
99    case DEAD_OBJECT:
100        return UNKNOWN_ERROR;
101    default:
102        return err;
103    }
104}
105
106struct MessageList : public RefBase {
107    MessageList() {
108    }
109    virtual ~MessageList() {
110    }
111    std::list<sp<AMessage> > &getList() { return mList; }
112private:
113    std::list<sp<AMessage> > mList;
114
115    DISALLOW_EVIL_CONSTRUCTORS(MessageList);
116};
117
// Lazily creates and returns a process-wide shared DataConverter instance.
// pthread_once guarantees the converter is constructed exactly once even
// under concurrent first calls; `once` is constant-initialized and
// sCopyConverter is zero-initialized, so neither needs a dynamic initializer.
static sp<DataConverter> getCopyConverter() {
    static pthread_once_t once = PTHREAD_ONCE_INIT; // const-inited
    static sp<DataConverter> sCopyConverter;        // zero-inited
    pthread_once(&once, [](){ sCopyConverter = new DataConverter(); });
    return sCopyConverter;
}
124
// Receives callbacks from the OMX component via the IOMXObserver binder
// interface and repackages each batch of omx_messages into AMessages,
// posting the whole batch as a single notification to the target configured
// through setNotificationMessage().
struct CodecObserver : public BnOMXObserver {
    CodecObserver() {}

    // Sets the template message that onMessages() dup()s and posts per batch.
    void setNotificationMessage(const sp<AMessage> &msg) {
        mNotify = msg;
    }

    // from IOMXObserver
    virtual void onMessages(const std::list<omx_message> &messages) {
        if (messages.empty()) {
            return;
        }

        sp<AMessage> notify = mNotify->dup();
        bool first = true;
        sp<MessageList> msgList = new MessageList();
        for (std::list<omx_message>::const_iterator it = messages.cbegin();
              it != messages.cend(); ++it) {
            const omx_message &omx_msg = *it;
            if (first) {
                // Node id is recorded once from the first message; the batch
                // is presumably homogeneous per node — confirm with callers.
                notify->setInt32("node", omx_msg.node);
                first = false;
            }

            // Translate this omx_message into an AMessage, copying only the
            // fields relevant to its type. The string keys here form a
            // contract with the consumer of the notification.
            sp<AMessage> msg = new AMessage;
            msg->setInt32("type", omx_msg.type);
            switch (omx_msg.type) {
                case omx_message::EVENT:
                {
                    msg->setInt32("event", omx_msg.u.event_data.event);
                    msg->setInt32("data1", omx_msg.u.event_data.data1);
                    msg->setInt32("data2", omx_msg.u.event_data.data2);
                    break;
                }

                case omx_message::EMPTY_BUFFER_DONE:
                {
                    msg->setInt32("buffer", omx_msg.u.buffer_data.buffer);
                    msg->setInt32("fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FILL_BUFFER_DONE:
                {
                    msg->setInt32(
                            "buffer", omx_msg.u.extended_buffer_data.buffer);
                    msg->setInt32(
                            "range_offset",
                            omx_msg.u.extended_buffer_data.range_offset);
                    msg->setInt32(
                            "range_length",
                            omx_msg.u.extended_buffer_data.range_length);
                    msg->setInt32(
                            "flags",
                            omx_msg.u.extended_buffer_data.flags);
                    msg->setInt64(
                            "timestamp",
                            omx_msg.u.extended_buffer_data.timestamp);
                    msg->setInt32(
                            "fence_fd", omx_msg.fenceFd);
                    break;
                }

                case omx_message::FRAME_RENDERED:
                {
                    msg->setInt64(
                            "media_time_us", omx_msg.u.render_data.timestamp);
                    msg->setInt64(
                            "system_nano", omx_msg.u.render_data.nanoTime);
                    break;
                }

                default:
                    // Unknown types are logged but still forwarded with only
                    // their "type" field set (push_back below is unconditional).
                    ALOGE("Unrecognized message type: %d", omx_msg.type);
                    break;
            }
            msgList->getList().push_back(msg);
        }
        // Attach the full translated batch and post exactly one notification.
        notify->setObject("messages", msgList);
        notify->post();
    }

protected:
    virtual ~CodecObserver() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(CodecObserver);
};
215
216////////////////////////////////////////////////////////////////////////////////
217
// Common base class for all of ACodec's states. Centralizes dispatch of OMX
// callback messages (events, empty/fill-buffer-done, frame-rendered) and
// exposes virtual hooks so each concrete state overrides only the behavior
// that differs.
struct ACodec::BaseState : public AState {
    BaseState(ACodec *codec, const sp<AState> &parentState = NULL);

protected:
    // Policy for buffers returned to us on a port while in this state.
    enum PortMode {
        KEEP_BUFFERS,
        RESUBMIT_BUFFERS,
        FREE_BUFFERS,
    };

    // Non-owning back-pointer to the ACodec that owns this state object.
    ACodec *mCodec;

    virtual PortMode getPortMode(OMX_U32 portIndex);

    virtual bool onMessageReceived(const sp<AMessage> &msg);

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

    void postFillThisBuffer(BufferInfo *info);

private:
    // Handles an OMX message. Returns true iff message was handled.
    bool onOMXMessage(const sp<AMessage> &msg);

    // Handles a list of messages. Returns true iff messages were handled.
    bool onOMXMessageList(const sp<AMessage> &msg);

    // returns true iff this message is for this component and the component is alive
    bool checkOMXMessage(const sp<AMessage> &msg);

    bool onOMXEmptyBufferDone(IOMX::buffer_id bufferID, int fenceFd);

    bool onOMXFillBufferDone(
            IOMX::buffer_id bufferID,
            size_t rangeOffset, size_t rangeLength,
            OMX_U32 flags,
            int64_t timeUs,
            int fenceFd);

    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

    void getMoreInputDataIfPossible();

    DISALLOW_EVIL_CONSTRUCTORS(BaseState);
};
266
267////////////////////////////////////////////////////////////////////////////////
268
// Watches a remote binder and posts the stored notification message once
// when that binder dies.
struct ACodec::DeathNotifier : public IBinder::DeathRecipient {
    DeathNotifier(const sp<AMessage> &notify)
        : mNotify(notify) {
    }

    // IBinder::DeathRecipient: fire the notification.
    virtual void binderDied(const wp<IBinder> &) {
        mNotify->post();
    }

protected:
    virtual ~DeathNotifier() {}

private:
    sp<AMessage> mNotify;

    DISALLOW_EVIL_CONSTRUCTORS(DeathNotifier);
};
286
// Initial state: no OMX component allocated yet. Handles setup and
// component-allocation requests (onSetup / onAllocateComponent).
struct ACodec::UninitializedState : public ACodec::BaseState {
    UninitializedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    void onSetup(const sp<AMessage> &msg);
    bool onAllocateComponent(const sp<AMessage> &msg);

    // Watches the media server; see DeathNotifier above. TODO(review):
    // confirm which binder it is registered on in onAllocateComponent.
    sp<DeathNotifier> mDeathNotifier;

    DISALLOW_EVIL_CONSTRUCTORS(UninitializedState);
};
302
303////////////////////////////////////////////////////////////////////////////////
304
// State with a component allocated (per the OMX "Loaded" state name).
// Handles configuration, input-surface creation/attachment, start and
// shutdown requests via the private handlers below.
struct ACodec::LoadedState : public ACodec::BaseState {
    LoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

private:
    friend struct ACodec::UninitializedState;

    bool onConfigureComponent(const sp<AMessage> &msg);
    void onCreateInputSurface(const sp<AMessage> &msg);
    void onSetInputSurface(const sp<AMessage> &msg);
    void onStart();
    void onShutdown(bool keepComponentAllocated);

    status_t setupInputSurface();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedState);
};
325
326////////////////////////////////////////////////////////////////////////////////
327
// Transitional state (per its name, OMX Loaded -> Idle). Allocates port
// buffers via allocateBuffers() as part of the transition.
struct ACodec::LoadedToIdleState : public ACodec::BaseState {
    LoadedToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    status_t allocateBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(LoadedToIdleState);
};
341
342////////////////////////////////////////////////////////////////////////////////
343
// Transitional state (per its name, OMX Idle -> Executing); waits for the
// component to confirm the state change via onOMXEvent.
struct ACodec::IdleToExecutingState : public ACodec::BaseState {
    IdleToExecutingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual void stateEntered();

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToExecutingState);
};
355
356////////////////////////////////////////////////////////////////////////////////
357
// Steady-state operation: buffers circulate between the component, the
// client and (for video) the native window.
struct ACodec::ExecutingState : public ACodec::BaseState {
    ExecutingState(ACodec *codec);

    void submitRegularOutputBuffers();
    void submitOutputMetaBuffers();
    void submitOutputBuffers();

    // Submit output buffers to the decoder, submit input buffers to client
    // to fill with data.
    void resume();

    // Returns true iff input and output buffers are in play.
    bool active() const { return mActive; }

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    // True iff input and output buffers are in play (see active()).
    bool mActive;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingState);
};
385
386////////////////////////////////////////////////////////////////////////////////
387
// Entered while the output port is being reconfigured after the component
// signaled a port-settings change (per the state name).
struct ACodec::OutputPortSettingsChangedState : public ACodec::BaseState {
    OutputPortSettingsChangedState(ACodec *codec);

protected:
    virtual PortMode getPortMode(OMX_U32 portIndex);
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);
    virtual bool onOMXFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano);

private:
    DISALLOW_EVIL_CONSTRUCTORS(OutputPortSettingsChangedState);
};
402
403////////////////////////////////////////////////////////////////////////////////
404
// Transitional state (per its name, Executing -> Idle, i.e. stopping).
// Overrides the buffer callbacks so the transition can complete once all
// buffers are back in our ownership.
struct ACodec::ExecutingToIdleState : public ACodec::BaseState {
    ExecutingToIdleState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    void changeStateIfWeOwnAllBuffers();

    // Presumably set once the component reports it reached Idle — confirm
    // against the onOMXEvent implementation.
    bool mComponentNowIdle;

    DISALLOW_EVIL_CONSTRUCTORS(ExecutingToIdleState);
};
424
425////////////////////////////////////////////////////////////////////////////////
426
// Transitional state (per its name, Idle -> Loaded); waits for the
// component's confirmation via onOMXEvent.
struct ACodec::IdleToLoadedState : public ACodec::BaseState {
    IdleToLoadedState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

private:
    DISALLOW_EVIL_CONSTRUCTORS(IdleToLoadedState);
};
439
440////////////////////////////////////////////////////////////////////////////////
441
// Entered while both ports are being flushed. Intercepts the buffer
// callbacks and leaves once all buffers are owned by us again.
struct ACodec::FlushingState : public ACodec::BaseState {
    FlushingState(ACodec *codec);

protected:
    virtual bool onMessageReceived(const sp<AMessage> &msg);
    virtual void stateEntered();

    virtual bool onOMXEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2);

    virtual void onOutputBufferDrained(const sp<AMessage> &msg);
    virtual void onInputBufferFilled(const sp<AMessage> &msg);

private:
    // Per-port flush-complete flags; presumably indexed by
    // kPortIndexInput/kPortIndexOutput — confirm in the implementation.
    bool mFlushComplete[2];

    void changeStateIfWeOwnAllBuffers();

    DISALLOW_EVIL_CONSTRUCTORS(FlushingState);
};
461
462////////////////////////////////////////////////////////////////////////////////
463
464void ACodec::BufferInfo::setWriteFence(int fenceFd, const char *dbg) {
465    if (mFenceFd >= 0) {
466        ALOGW("OVERWRITE OF %s fence %d by write fence %d in %s",
467                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
468    }
469    mFenceFd = fenceFd;
470    mIsReadFence = false;
471}
472
473void ACodec::BufferInfo::setReadFence(int fenceFd, const char *dbg) {
474    if (mFenceFd >= 0) {
475        ALOGW("OVERWRITE OF %s fence %d by read fence %d in %s",
476                mIsReadFence ? "read" : "write", mFenceFd, fenceFd, dbg);
477    }
478    mFenceFd = fenceFd;
479    mIsReadFence = true;
480}
481
482void ACodec::BufferInfo::checkWriteFence(const char *dbg) {
483    if (mFenceFd >= 0 && mIsReadFence) {
484        ALOGD("REUSING read fence %d as write fence in %s", mFenceFd, dbg);
485    }
486}
487
488void ACodec::BufferInfo::checkReadFence(const char *dbg) {
489    if (mFenceFd >= 0 && !mIsReadFence) {
490        ALOGD("REUSING write fence %d as read fence in %s", mFenceFd, dbg);
491    }
492}
493
494////////////////////////////////////////////////////////////////////////////////
495
// Constructs an ACodec with all scalar members reset to their defaults,
// builds every state object up front, and enters UninitializedState.
// Component allocation happens later (see initiateAllocateComponent()).
ACodec::ACodec()
    : mQuirks(0),
      mNode(0),
      mUsingNativeWindow(false),
      mNativeWindowUsageBits(0),
      mLastNativeWindowDataSpace(HAL_DATASPACE_UNKNOWN),
      mIsVideo(false),
      mIsEncoder(false),
      mFatalError(false),
      mShutdownInProgress(false),
      mExplicitShutdown(false),
      mIsLegacyVP9Decoder(false),
      mEncoderDelay(0),
      mEncoderPadding(0),
      mRotationDegrees(0),
      mChannelMaskPresent(false),
      mChannelMask(0),
      mDequeueCounter(0),
      mInputMetadataType(kMetadataBufferTypeInvalid),
      mOutputMetadataType(kMetadataBufferTypeInvalid),
      mLegacyAdaptiveExperiment(false),
      mMetadataBuffersToSubmit(0),
      mNumUndequeuedBuffers(0),
      mRepeatFrameDelayUs(-1ll),
      mMaxPtsGapUs(-1ll),
      mMaxFps(-1),
      mTimePerFrameUs(-1ll),
      mTimePerCaptureUs(-1ll),
      mCreateInputBuffersSuspended(false),
      mTunneled(false),
      mDescribeColorAspectsIndex((OMX_INDEXTYPE)0),
      mDescribeHDRStaticInfoIndex((OMX_INDEXTYPE)0) {
    // All states live for the lifetime of this ACodec; transitions merely
    // switch between these pre-built instances.
    mUninitializedState = new UninitializedState(this);
    mLoadedState = new LoadedState(this);
    mLoadedToIdleState = new LoadedToIdleState(this);
    mIdleToExecutingState = new IdleToExecutingState(this);
    mExecutingState = new ExecutingState(this);

    mOutputPortSettingsChangedState =
        new OutputPortSettingsChangedState(this);

    mExecutingToIdleState = new ExecutingToIdleState(this);
    mIdleToLoadedState = new IdleToLoadedState(this);
    mFlushingState = new FlushingState(this);

    // No EOS observed on either port yet.
    mPortEOS[kPortIndexInput] = mPortEOS[kPortIndexOutput] = false;
    mInputEOSResult = OK;

    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));

    // Start the state machine in the uninitialized state.
    changeState(mUninitializedState);
}
548
// Nothing to release explicitly; members (states, buffers, messages) are
// ref-counted and clean themselves up.
ACodec::~ACodec() {
}
551
// Stores the message used to notify the client of events/codec output.
void ACodec::setNotificationMessage(const sp<AMessage> &msg) {
    mNotify = msg;
}
555
// Re-targets |msg| as a kWhatSetup request at this ACodec and posts it for
// asynchronous handling on the looper thread.
void ACodec::initiateSetup(const sp<AMessage> &msg) {
    msg->setWhat(kWhatSetup);
    msg->setTarget(this);
    msg->post();
}
561
562void ACodec::signalSetParameters(const sp<AMessage> &params) {
563    sp<AMessage> msg = new AMessage(kWhatSetParameters, this);
564    msg->setMessage("params", params);
565    msg->post();
566}
567
// Re-targets |msg| as a kWhatAllocateComponent request at this ACodec and
// posts it for asynchronous handling on the looper thread.
void ACodec::initiateAllocateComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatAllocateComponent);
    msg->setTarget(this);
    msg->post();
}
573
// Re-targets |msg| as a kWhatConfigureComponent request at this ACodec and
// posts it for asynchronous handling on the looper thread.
void ACodec::initiateConfigureComponent(const sp<AMessage> &msg) {
    msg->setWhat(kWhatConfigureComponent);
    msg->setTarget(this);
    msg->post();
}
579
580status_t ACodec::setSurface(const sp<Surface> &surface) {
581    sp<AMessage> msg = new AMessage(kWhatSetSurface, this);
582    msg->setObject("surface", surface);
583
584    sp<AMessage> response;
585    status_t err = msg->postAndAwaitResponse(&response);
586
587    if (err == OK) {
588        (void)response->findInt32("err", &err);
589    }
590    return err;
591}
592
593void ACodec::initiateCreateInputSurface() {
594    (new AMessage(kWhatCreateInputSurface, this))->post();
595}
596
597void ACodec::initiateSetInputSurface(
598        const sp<PersistentSurface> &surface) {
599    sp<AMessage> msg = new AMessage(kWhatSetInputSurface, this);
600    msg->setObject("input-surface", surface);
601    msg->post();
602}
603
604void ACodec::signalEndOfInputStream() {
605    (new AMessage(kWhatSignalEndOfInputStream, this))->post();
606}
607
608void ACodec::initiateStart() {
609    (new AMessage(kWhatStart, this))->post();
610}
611
612void ACodec::signalFlush() {
613    ALOGV("[%s] signalFlush", mComponentName.c_str());
614    (new AMessage(kWhatFlush, this))->post();
615}
616
617void ACodec::signalResume() {
618    (new AMessage(kWhatResume, this))->post();
619}
620
621void ACodec::initiateShutdown(bool keepComponentAllocated) {
622    sp<AMessage> msg = new AMessage(kWhatShutdown, this);
623    msg->setInt32("keepComponentAllocated", keepComponentAllocated);
624    msg->post();
625    if (!keepComponentAllocated) {
626        // ensure shutdown completes in 3 seconds
627        (new AMessage(kWhatReleaseCodecInstance, this))->post(3000000);
628    }
629}
630
631void ACodec::signalRequestIDRFrame() {
632    (new AMessage(kWhatRequestIDRFrame, this))->post();
633}
634
635// *** NOTE: THE FOLLOWING WORKAROUND WILL BE REMOVED ***
636// Some codecs may return input buffers before having them processed.
637// This causes a halt if we already signaled an EOS on the input
638// port.  For now keep submitting an output buffer if there was an
639// EOS on the input port, but not yet on the output port.
640void ACodec::signalSubmitOutputMetadataBufferIfEOS_workaround() {
641    if (mPortEOS[kPortIndexInput] && !mPortEOS[kPortIndexOutput] &&
642            mMetadataBuffersToSubmit > 0) {
643        (new AMessage(kWhatSubmitOutputMetadataBufferIfEOS, this))->post();
644    }
645}
646
// Switches the output to |surface|, possibly while the codec is already
// running. Validates compatibility (usage bits, min-undequeued count,
// buffer count), migrates already-registered buffers to the new surface,
// then commits it. Returns OK on success; on any validation or migration
// error the member state is left pointing at the old surface.
status_t ACodec::handleSetSurface(const sp<Surface> &surface) {
    // allow keeping unset surface
    if (surface == NULL) {
        if (mNativeWindow != NULL) {
            ALOGW("cannot unset a surface");
            return INVALID_OPERATION;
        }
        return OK;
    }

    // cannot switch from bytebuffers to surface
    if (mNativeWindow == NULL) {
        ALOGW("component was not configured with a surface");
        return INVALID_OPERATION;
    }

    ANativeWindow *nativeWindow = surface.get();
    // if we have not yet started the codec, we can simply set the native window
    if (mBuffers[kPortIndexInput].size() == 0) {
        mNativeWindow = surface;
        return OK;
    }

    // we do not support changing a tunneled surface after start
    if (mTunneled) {
        ALOGW("cannot change tunneled surface");
        return INVALID_OPERATION;
    }

    int usageBits = 0;
    // no need to reconnect as we will not dequeue all buffers
    status_t err = setupNativeWindowSizeFormatAndUsage(
            nativeWindow, &usageBits,
            !storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment /* reconnect */);
    if (err != OK) {
        return err;
    }

    int ignoredFlags = kVideoGrallocUsage;
    // New output surface is not allowed to add new usage flag except ignored ones.
    if ((usageBits & ~(mNativeWindowUsageBits | ignoredFlags)) != 0) {
        ALOGW("cannot change usage from %#x to %#x", mNativeWindowUsageBits, usageBits);
        return BAD_VALUE;
    }

    // get min undequeued count. We cannot switch to a surface that has a higher
    // undequeued count than we allocated.
    int minUndequeuedBuffers = 0;
    err = nativeWindow->query(
            nativeWindow, NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            &minUndequeuedBuffers);
    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }
    if (minUndequeuedBuffers > (int)mNumUndequeuedBuffers) {
        ALOGE("new surface holds onto more buffers (%d) than planned for (%zu)",
                minUndequeuedBuffers, mNumUndequeuedBuffers);
        return BAD_VALUE;
    }

    // we cannot change the number of output buffers while OMX is running
    // set up surface to the same count
    Vector<BufferInfo> &buffers = mBuffers[kPortIndexOutput];
    ALOGV("setting up surface for %zu buffers", buffers.size());

    err = native_window_set_buffer_count(nativeWindow, buffers.size());
    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    // need to enable allocation when attaching
    surface->getIGraphicBufferProducer()->allowAllocation(true);

    // for meta data mode, we move dequeued buffers to the new surface.
    // for non-meta mode, we must move all registered buffers
    for (size_t i = 0; i < buffers.size(); ++i) {
        const BufferInfo &info = buffers[i];
        // skip undequeued buffers for meta data mode
        if (storingMetadataInDecodedBuffers()
                && !mLegacyAdaptiveExperiment
                && info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
            ALOGV("skipping buffer");
            continue;
        }
        ALOGV("attaching buffer %p", info.mGraphicBuffer->getNativeBuffer());

        err = surface->attachBuffer(info.mGraphicBuffer->getNativeBuffer());
        if (err != OK) {
            ALOGE("failed to attach buffer %p to the new surface: %s (%d)",
                    info.mGraphicBuffer->getNativeBuffer(),
                    strerror(-err), -err);
            return err;
        }
    }

    // cancel undequeued buffers to new surface
    if (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment) {
        for (size_t i = 0; i < buffers.size(); ++i) {
            BufferInfo &info = buffers.editItemAt(i);
            if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
                ALOGV("canceling buffer %p", info.mGraphicBuffer->getNativeBuffer());
                err = nativeWindow->cancelBuffer(
                        nativeWindow, info.mGraphicBuffer->getNativeBuffer(), info.mFenceFd);
                // the fence was handed to cancelBuffer; we no longer own it
                info.mFenceFd = -1;
                if (err != OK) {
                    ALOGE("failed to cancel buffer %p to the new surface: %s (%d)",
                            info.mGraphicBuffer->getNativeBuffer(),
                            strerror(-err), -err);
                    return err;
                }
            }
        }
        // disallow further allocation
        (void)surface->getIGraphicBufferProducer()->allowAllocation(false);
    }

    // push blank buffers to previous window if requested
    if (mFlags & kFlagPushBlankBuffersToNativeWindowOnShutdown) {
        pushBlankBuffersToNativeWindow(mNativeWindow.get());
    }

    // all checks and migrations succeeded; commit the new surface
    mNativeWindow = nativeWindow;
    mNativeWindowUsageBits = usageBits;
    return OK;
}
776
777status_t ACodec::allocateBuffersOnPort(OMX_U32 portIndex) {
778    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
779
780    CHECK(mDealer[portIndex] == NULL);
781    CHECK(mBuffers[portIndex].isEmpty());
782
783    status_t err;
784    if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
785        if (storingMetadataInDecodedBuffers()) {
786            err = allocateOutputMetadataBuffers();
787        } else {
788            err = allocateOutputBuffersFromNativeWindow();
789        }
790    } else {
791        OMX_PARAM_PORTDEFINITIONTYPE def;
792        InitOMXParams(&def);
793        def.nPortIndex = portIndex;
794
795        err = mOMX->getParameter(
796                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
797
798        if (err == OK) {
799            MetadataBufferType type =
800                portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
801            size_t bufSize = def.nBufferSize;
802            if (type == kMetadataBufferTypeANWBuffer) {
803                bufSize = sizeof(VideoNativeMetadata);
804            } else if (type == kMetadataBufferTypeNativeHandleSource) {
805                bufSize = sizeof(VideoNativeHandleMetadata);
806            }
807
808            // If using gralloc or native source input metadata buffers, allocate largest
809            // metadata size as we prefer to generate native source metadata, but component
810            // may require gralloc source. For camera source, allocate at least enough
811            // size for native metadata buffers.
812            size_t allottedSize = bufSize;
813            if (portIndex == kPortIndexInput && type == kMetadataBufferTypeANWBuffer) {
814                bufSize = max(sizeof(VideoGrallocMetadata), sizeof(VideoNativeMetadata));
815            } else if (portIndex == kPortIndexInput && type == kMetadataBufferTypeCameraSource) {
816                bufSize = max(bufSize, sizeof(VideoNativeMetadata));
817            }
818
819            size_t conversionBufferSize = 0;
820
821            sp<DataConverter> converter = mConverter[portIndex];
822            if (converter != NULL) {
823                // here we assume sane conversions of max 4:1, so result fits in int32
824                if (portIndex == kPortIndexInput) {
825                    conversionBufferSize = converter->sourceSize(bufSize);
826                } else {
827                    conversionBufferSize = converter->targetSize(bufSize);
828                }
829            }
830
831            size_t alignment = MemoryDealer::getAllocationAlignment();
832
833            ALOGV("[%s] Allocating %u buffers of size %zu/%zu (from %u using %s) on %s port",
834                    mComponentName.c_str(),
835                    def.nBufferCountActual, bufSize, allottedSize, def.nBufferSize, asString(type),
836                    portIndex == kPortIndexInput ? "input" : "output");
837
838            // verify buffer sizes to avoid overflow in align()
839            if (bufSize == 0 || max(bufSize, conversionBufferSize) > kMaxCodecBufferSize) {
840                ALOGE("b/22885421");
841                return NO_MEMORY;
842            }
843
844            // don't modify bufSize as OMX may not expect it to increase after negotiation
845            size_t alignedSize = align(bufSize, alignment);
846            size_t alignedConvSize = align(conversionBufferSize, alignment);
847            if (def.nBufferCountActual > SIZE_MAX / (alignedSize + alignedConvSize)) {
848                ALOGE("b/22885421");
849                return NO_MEMORY;
850            }
851
852            size_t totalSize = def.nBufferCountActual * (alignedSize + alignedConvSize);
853            mDealer[portIndex] = new MemoryDealer(totalSize, "ACodec");
854
855            for (OMX_U32 i = 0; i < def.nBufferCountActual && err == OK; ++i) {
856                sp<IMemory> mem = mDealer[portIndex]->allocate(bufSize);
857                if (mem == NULL || mem->pointer() == NULL) {
858                    return NO_MEMORY;
859                }
860
861                BufferInfo info;
862                info.mStatus = BufferInfo::OWNED_BY_US;
863                info.mFenceFd = -1;
864                info.mRenderInfo = NULL;
865                info.mNativeHandle = NULL;
866
867                uint32_t requiresAllocateBufferBit =
868                    (portIndex == kPortIndexInput)
869                        ? kRequiresAllocateBufferOnInputPorts
870                        : kRequiresAllocateBufferOnOutputPorts;
871
872                if (portIndex == kPortIndexInput && (mFlags & kFlagIsSecure)) {
873                    mem.clear();
874
875                    void *ptr = NULL;
876                    sp<NativeHandle> native_handle;
877                    err = mOMX->allocateSecureBuffer(
878                            mNode, portIndex, bufSize, &info.mBufferID,
879                            &ptr, &native_handle);
880
881                    // TRICKY: this representation is unorthodox, but ACodec requires
882                    // an ABuffer with a proper size to validate range offsets and lengths.
883                    // Since mData is never referenced for secure input, it is used to store
884                    // either the pointer to the secure buffer, or the opaque handle as on
885                    // some devices ptr is actually an opaque handle, not a pointer.
886
887                    // TRICKY2: use native handle as the base of the ABuffer if received one,
888                    // because Widevine source only receives these base addresses.
889                    const native_handle_t *native_handle_ptr =
890                        native_handle == NULL ? NULL : native_handle->handle();
891                    info.mData = new ABuffer(
892                            ptr != NULL ? ptr : (void *)native_handle_ptr, bufSize);
893                    info.mNativeHandle = native_handle;
894                    info.mCodecData = info.mData;
895                } else if (mQuirks & requiresAllocateBufferBit) {
896                    err = mOMX->allocateBufferWithBackup(
897                            mNode, portIndex, mem, &info.mBufferID, allottedSize);
898                } else {
899                    err = mOMX->useBuffer(mNode, portIndex, mem, &info.mBufferID, allottedSize);
900                }
901
902                if (mem != NULL) {
903                    info.mCodecData = new ABuffer(mem->pointer(), bufSize);
904                    info.mCodecRef = mem;
905
906                    if (type == kMetadataBufferTypeANWBuffer) {
907                        ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
908                    }
909
910                    // if we require conversion, allocate conversion buffer for client use;
911                    // otherwise, reuse codec buffer
912                    if (mConverter[portIndex] != NULL) {
913                        CHECK_GT(conversionBufferSize, (size_t)0);
914                        mem = mDealer[portIndex]->allocate(conversionBufferSize);
915                        if (mem == NULL|| mem->pointer() == NULL) {
916                            return NO_MEMORY;
917                        }
918                        info.mData = new ABuffer(mem->pointer(), conversionBufferSize);
919                        info.mMemRef = mem;
920                    } else {
921                        info.mData = info.mCodecData;
922                        info.mMemRef = info.mCodecRef;
923                    }
924                }
925
926                mBuffers[portIndex].push(info);
927            }
928        }
929    }
930
931    if (err != OK) {
932        return err;
933    }
934
935    sp<AMessage> notify = mNotify->dup();
936    notify->setInt32("what", CodecBase::kWhatBuffersAllocated);
937
938    notify->setInt32("portIndex", portIndex);
939
940    sp<PortDescription> desc = new PortDescription;
941
942    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
943        const BufferInfo &info = mBuffers[portIndex][i];
944        desc->addBuffer(info.mBufferID, info.mData, info.mNativeHandle, info.mMemRef);
945    }
946
947    notify->setObject("portDesc", desc);
948    notify->post();
949
950    return OK;
951}
952
953status_t ACodec::setupNativeWindowSizeFormatAndUsage(
954        ANativeWindow *nativeWindow /* nonnull */, int *finalUsage /* nonnull */,
955        bool reconnect) {
956    OMX_PARAM_PORTDEFINITIONTYPE def;
957    InitOMXParams(&def);
958    def.nPortIndex = kPortIndexOutput;
959
960    status_t err = mOMX->getParameter(
961            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
962
963    if (err != OK) {
964        return err;
965    }
966
967    OMX_U32 usage = 0;
968    err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
969    if (err != 0) {
970        ALOGW("querying usage flags from OMX IL component failed: %d", err);
971        // XXX: Currently this error is logged, but not fatal.
972        usage = 0;
973    }
974    int omxUsage = usage;
975
976    if (mFlags & kFlagIsGrallocUsageProtected) {
977        usage |= GRALLOC_USAGE_PROTECTED;
978    }
979
980    usage |= kVideoGrallocUsage;
981    *finalUsage = usage;
982
983    memset(&mLastNativeWindowCrop, 0, sizeof(mLastNativeWindowCrop));
984    mLastNativeWindowDataSpace = HAL_DATASPACE_UNKNOWN;
985
986    ALOGV("gralloc usage: %#x(OMX) => %#x(ACodec)", omxUsage, usage);
987    return setNativeWindowSizeFormatAndUsage(
988            nativeWindow,
989            def.format.video.nFrameWidth,
990            def.format.video.nFrameHeight,
991            def.format.video.eColorFormat,
992            mRotationDegrees,
993            usage,
994            reconnect);
995}
996
// Queries the output port definition, (re)configures the native window to
// match it, and negotiates the output buffer count with both the OMX
// component and the native window.  On success returns the agreed buffer
// count/size and the number of buffers the consumer keeps undequeued.
// Tunneled codecs skip native-window allocation entirely.
status_t ACodec::configureOutputBuffersFromNativeWindow(
        OMX_U32 *bufferCount, OMX_U32 *bufferSize,
        OMX_U32 *minUndequeuedBuffers, bool preregister) {

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err == OK) {
        // size/format/usage come from the port definition just read
        err = setupNativeWindowSizeFormatAndUsage(
                mNativeWindow.get(), &mNativeWindowUsageBits, preregister /* reconnect */);
    }
    if (err != OK) {
        mNativeWindowUsageBits = 0;
        return err;
    }

    // Exits here for tunneled video playback codecs -- i.e. skips native window
    // buffer allocation step as this is managed by the tunneled OMX component
    // itself and explicitly sets def.nBufferCountActual to 0.
    if (mTunneled) {
        ALOGV("Tunneled Playback: skipping native window buffer allocation.");
        def.nBufferCountActual = 0;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        *minUndequeuedBuffers = 0;
        *bufferCount = 0;
        *bufferSize = 0;
        return err;
    }

    *minUndequeuedBuffers = 0;
    err = mNativeWindow->query(
            mNativeWindow.get(), NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS,
            (int *)minUndequeuedBuffers);

    if (err != 0) {
        ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
                strerror(-err), -err);
        return err;
    }

    // FIXME: assume that surface is controlled by app (native window
    // returns the number for the case when surface is not controlled by app)
    // FIXME2: This means that minUndeqeueudBufs can be 1 larger than reported
    // For now, try to allocate 1 more buffer, but don't fail if unsuccessful

    // Use conservative allocation while also trying to reduce starvation
    //
    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
    //    minimum needed for the consumer to be able to work
    // 2. try to allocate two (2) additional buffers to reduce starvation from
    //    the consumer
    //    plus an extra buffer to account for incorrect minUndequeuedBufs
    // Retry with progressively fewer extra buffers if the component rejects
    // the count; give up once even zero extras fails.
    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
        OMX_U32 newBufferCount =
            def.nBufferCountMin + *minUndequeuedBuffers + extraBuffers;
        def.nBufferCountActual = newBufferCount;
        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err == OK) {
            // the extras effectively raise the undequeued quota we honor
            *minUndequeuedBuffers += extraBuffers;
            break;
        }

        ALOGW("[%s] setting nBufferCountActual to %u failed: %d",
                mComponentName.c_str(), newBufferCount, err);
        /* exit condition */
        if (extraBuffers == 0) {
            return err;
        }
    }

    // Tell the window how many buffers the queue must hold.
    err = native_window_set_buffer_count(
            mNativeWindow.get(), def.nBufferCountActual);

    if (err != 0) {
        ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
                -err);
        return err;
    }

    *bufferCount = def.nBufferCountActual;
    *bufferSize =  def.nBufferSize;
    return err;
}
1088
// Allocates the full set of output buffers by dequeuing graphic buffers
// from the native window and registering each with the OMX component via
// useGraphicBuffer.  Afterwards, the window's minimum-undequeued quota is
// cancelled back to it; on any failure, every buffer still owned by us is
// cancelled instead.
status_t ACodec::allocateOutputBuffersFromNativeWindow() {
    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
    status_t err = configureOutputBuffersFromNativeWindow(
            &bufferCount, &bufferSize, &minUndequeuedBuffers, true /* preregister */);
    if (err != 0)
        return err;
    mNumUndequeuedBuffers = minUndequeuedBuffers;

    // temporarily permit allocation so the dequeues below may create buffers
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(true);
    }

    ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
         "output port",
         mComponentName.c_str(), bufferCount, bufferSize);

    // Dequeue buffers and send them to OMX
    for (OMX_U32 i = 0; i < bufferCount; i++) {
        ANativeWindowBuffer *buf;
        int fenceFd;
        err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
        if (err != 0) {
            ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
            break;
        }

        // keepOwnership = false: the window retains the underlying handle
        sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
        BufferInfo info;
        info.mStatus = BufferInfo::OWNED_BY_US;
        info.mFenceFd = fenceFd;
        info.mIsReadFence = false;
        info.mRenderInfo = NULL;
        // mData has no backing store here; only its capacity is meaningful
        info.mData = new ABuffer(NULL /* data */, bufferSize /* capacity */);
        info.mCodecData = info.mData;
        info.mGraphicBuffer = graphicBuffer;
        // push before registering so cleanup below can cancel it on failure
        mBuffers[kPortIndexOutput].push(info);

        IOMX::buffer_id bufferId;
        err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
                &bufferId);
        if (err != 0) {
            ALOGE("registering GraphicBuffer %u with OMX IL component failed: "
                 "%d", i, err);
            break;
        }

        mBuffers[kPortIndexOutput].editItemAt(i).mBufferID = bufferId;

        ALOGV("[%s] Registered graphic buffer with ID %u (pointer = %p)",
             mComponentName.c_str(),
             bufferId, graphicBuffer.get());
    }

    OMX_U32 cancelStart;
    OMX_U32 cancelEnd;

    if (err != 0) {
        // If an error occurred while dequeuing we need to cancel any buffers
        // that were dequeued.
        cancelStart = 0;
        cancelEnd = mBuffers[kPortIndexOutput].size();
    } else {
        // Return the required minimum undequeued buffers to the native window.
        cancelStart = bufferCount - minUndequeuedBuffers;
        cancelEnd = bufferCount;
    }

    for (OMX_U32 i = cancelStart; i < cancelEnd; i++) {
        BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
        if (info->mStatus == BufferInfo::OWNED_BY_US) {
            status_t error = cancelBufferToNativeWindow(info);
            // report the first error encountered, but keep cancelling
            if (err == 0) {
                err = error;
            }
        }
    }

    // restore the no-allocation policy now that the set is established
    if (!storingMetadataInDecodedBuffers()) {
        static_cast<Surface*>(mNativeWindow.get())
                ->getIGraphicBufferProducer()->allowAllocation(false);
    }

    return err;
}
1174
1175status_t ACodec::allocateOutputMetadataBuffers() {
1176    OMX_U32 bufferCount, bufferSize, minUndequeuedBuffers;
1177    status_t err = configureOutputBuffersFromNativeWindow(
1178            &bufferCount, &bufferSize, &minUndequeuedBuffers,
1179            mLegacyAdaptiveExperiment /* preregister */);
1180    if (err != 0)
1181        return err;
1182    mNumUndequeuedBuffers = minUndequeuedBuffers;
1183
1184    ALOGV("[%s] Allocating %u meta buffers on output port",
1185         mComponentName.c_str(), bufferCount);
1186
1187    size_t bufSize = mOutputMetadataType == kMetadataBufferTypeANWBuffer ?
1188            sizeof(struct VideoNativeMetadata) : sizeof(struct VideoGrallocMetadata);
1189    size_t totalSize = bufferCount * align(bufSize, MemoryDealer::getAllocationAlignment());
1190    mDealer[kPortIndexOutput] = new MemoryDealer(totalSize, "ACodec");
1191
1192    // Dequeue buffers and send them to OMX
1193    for (OMX_U32 i = 0; i < bufferCount; i++) {
1194        BufferInfo info;
1195        info.mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
1196        info.mFenceFd = -1;
1197        info.mRenderInfo = NULL;
1198        info.mGraphicBuffer = NULL;
1199        info.mDequeuedAt = mDequeueCounter;
1200
1201        sp<IMemory> mem = mDealer[kPortIndexOutput]->allocate(bufSize);
1202        if (mem == NULL || mem->pointer() == NULL) {
1203            return NO_MEMORY;
1204        }
1205        if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
1206            ((VideoNativeMetadata *)mem->pointer())->nFenceFd = -1;
1207        }
1208        info.mData = new ABuffer(mem->pointer(), mem->size());
1209        info.mMemRef = mem;
1210        info.mCodecData = info.mData;
1211        info.mCodecRef = mem;
1212
1213        // we use useBuffer for metadata regardless of quirks
1214        err = mOMX->useBuffer(
1215                mNode, kPortIndexOutput, mem, &info.mBufferID, mem->size());
1216        mBuffers[kPortIndexOutput].push(info);
1217
1218        ALOGV("[%s] allocated meta buffer with ID %u (pointer = %p)",
1219             mComponentName.c_str(), info.mBufferID, mem->pointer());
1220    }
1221
1222    if (mLegacyAdaptiveExperiment) {
1223        // preallocate and preregister buffers
1224        static_cast<Surface *>(mNativeWindow.get())
1225                ->getIGraphicBufferProducer()->allowAllocation(true);
1226
1227        ALOGV("[%s] Allocating %u buffers from a native window of size %u on "
1228             "output port",
1229             mComponentName.c_str(), bufferCount, bufferSize);
1230
1231        // Dequeue buffers then cancel them all
1232        for (OMX_U32 i = 0; i < bufferCount; i++) {
1233            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
1234
1235            ANativeWindowBuffer *buf;
1236            int fenceFd;
1237            err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
1238            if (err != 0) {
1239                ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
1240                break;
1241            }
1242
1243            sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
1244            mOMX->updateGraphicBufferInMeta(
1245                    mNode, kPortIndexOutput, graphicBuffer, info->mBufferID);
1246            info->mStatus = BufferInfo::OWNED_BY_US;
1247            info->setWriteFence(fenceFd, "allocateOutputMetadataBuffers for legacy");
1248            info->mGraphicBuffer = graphicBuffer;
1249        }
1250
1251        for (OMX_U32 i = 0; i < mBuffers[kPortIndexOutput].size(); i++) {
1252            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
1253            if (info->mStatus == BufferInfo::OWNED_BY_US) {
1254                status_t error = cancelBufferToNativeWindow(info);
1255                if (err == OK) {
1256                    err = error;
1257                }
1258            }
1259        }
1260
1261        static_cast<Surface*>(mNativeWindow.get())
1262                ->getIGraphicBufferProducer()->allowAllocation(false);
1263    }
1264
1265    mMetadataBuffersToSubmit = bufferCount - minUndequeuedBuffers;
1266    return err;
1267}
1268
1269status_t ACodec::submitOutputMetadataBuffer() {
1270    CHECK(storingMetadataInDecodedBuffers());
1271    if (mMetadataBuffersToSubmit == 0)
1272        return OK;
1273
1274    BufferInfo *info = dequeueBufferFromNativeWindow();
1275    if (info == NULL) {
1276        return ERROR_IO;
1277    }
1278
1279    ALOGV("[%s] submitting output meta buffer ID %u for graphic buffer %p",
1280          mComponentName.c_str(), info->mBufferID, info->mGraphicBuffer.get());
1281
1282    --mMetadataBuffersToSubmit;
1283    info->checkWriteFence("submitOutputMetadataBuffer");
1284    status_t err = mOMX->fillBuffer(mNode, info->mBufferID, info->mFenceFd);
1285    info->mFenceFd = -1;
1286    if (err == OK) {
1287        info->mStatus = BufferInfo::OWNED_BY_COMPONENT;
1288    }
1289
1290    return err;
1291}
1292
1293status_t ACodec::waitForFence(int fd, const char *dbg ) {
1294    status_t res = OK;
1295    if (fd >= 0) {
1296        sp<Fence> fence = new Fence(fd);
1297        res = fence->wait(IOMX::kFenceTimeoutMs);
1298        ALOGW_IF(res != OK, "FENCE TIMEOUT for %d in %s", fd, dbg);
1299    }
1300    return res;
1301}
1302
1303// static
1304const char *ACodec::_asString(BufferInfo::Status s) {
1305    switch (s) {
1306        case BufferInfo::OWNED_BY_US:            return "OUR";
1307        case BufferInfo::OWNED_BY_COMPONENT:     return "COMPONENT";
1308        case BufferInfo::OWNED_BY_UPSTREAM:      return "UPSTREAM";
1309        case BufferInfo::OWNED_BY_DOWNSTREAM:    return "DOWNSTREAM";
1310        case BufferInfo::OWNED_BY_NATIVE_WINDOW: return "SURFACE";
1311        case BufferInfo::UNRECOGNIZED:           return "UNRECOGNIZED";
1312        default:                                 return "?";
1313    }
1314}
1315
1316void ACodec::dumpBuffers(OMX_U32 portIndex) {
1317    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
1318    ALOGI("[%s] %s port has %zu buffers:", mComponentName.c_str(),
1319            portIndex == kPortIndexInput ? "input" : "output", mBuffers[portIndex].size());
1320    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
1321        const BufferInfo &info = mBuffers[portIndex][i];
1322        ALOGI("  slot %2zu: #%8u %p/%p %s(%d) dequeued:%u",
1323                i, info.mBufferID, info.mGraphicBuffer.get(),
1324                info.mGraphicBuffer == NULL ? NULL : info.mGraphicBuffer->getNativeBuffer(),
1325                _asString(info.mStatus), info.mStatus, info.mDequeuedAt);
1326    }
1327}
1328
1329status_t ACodec::cancelBufferToNativeWindow(BufferInfo *info) {
1330    CHECK_EQ((int)info->mStatus, (int)BufferInfo::OWNED_BY_US);
1331
1332    ALOGV("[%s] Calling cancelBuffer on buffer %u",
1333         mComponentName.c_str(), info->mBufferID);
1334
1335    info->checkWriteFence("cancelBufferToNativeWindow");
1336    int err = mNativeWindow->cancelBuffer(
1337        mNativeWindow.get(), info->mGraphicBuffer.get(), info->mFenceFd);
1338    info->mFenceFd = -1;
1339
1340    ALOGW_IF(err != 0, "[%s] can not return buffer %u to native window",
1341            mComponentName.c_str(), info->mBufferID);
1342    // change ownership even if cancelBuffer fails
1343    info->mStatus = BufferInfo::OWNED_BY_NATIVE_WINDOW;
1344
1345    return err;
1346}
1347
1348void ACodec::updateRenderInfoForDequeuedBuffer(
1349        ANativeWindowBuffer *buf, int fenceFd, BufferInfo *info) {
1350
1351    info->mRenderInfo =
1352        mRenderTracker.updateInfoForDequeuedBuffer(
1353                buf, fenceFd, info - &mBuffers[kPortIndexOutput][0]);
1354
1355    // check for any fences already signaled
1356    notifyOfRenderedFrames(false /* dropIncomplete */, info->mRenderInfo);
1357}
1358
1359void ACodec::onFrameRendered(int64_t mediaTimeUs, nsecs_t systemNano) {
1360    if (mRenderTracker.onFrameRendered(mediaTimeUs, systemNano) != OK) {
1361        mRenderTracker.dumpRenderQueue();
1362    }
1363}
1364
1365void ACodec::notifyOfRenderedFrames(bool dropIncomplete, FrameRenderTracker::Info *until) {
1366    sp<AMessage> msg = mNotify->dup();
1367    msg->setInt32("what", CodecBase::kWhatOutputFramesRendered);
1368    std::list<FrameRenderTracker::Info> done =
1369        mRenderTracker.checkFencesAndGetRenderedFrames(until, dropIncomplete);
1370
1371    // unlink untracked frames
1372    for (std::list<FrameRenderTracker::Info>::const_iterator it = done.cbegin();
1373            it != done.cend(); ++it) {
1374        ssize_t index = it->getIndex();
1375        if (index >= 0 && (size_t)index < mBuffers[kPortIndexOutput].size()) {
1376            mBuffers[kPortIndexOutput].editItemAt(index).mRenderInfo = NULL;
1377        } else if (index >= 0) {
1378            // THIS SHOULD NEVER HAPPEN
1379            ALOGE("invalid index %zd in %zu", index, mBuffers[kPortIndexOutput].size());
1380        }
1381    }
1382
1383    if (MediaCodec::CreateFramesRenderedMessage(done, msg)) {
1384        msg->post();
1385    }
1386}
1387
1388ACodec::BufferInfo *ACodec::dequeueBufferFromNativeWindow() {
1389    ANativeWindowBuffer *buf;
1390    CHECK(mNativeWindow.get() != NULL);
1391
1392    if (mTunneled) {
1393        ALOGW("dequeueBufferFromNativeWindow() should not be called in tunnel"
1394              " video playback mode mode!");
1395        return NULL;
1396    }
1397
1398    if (mFatalError) {
1399        ALOGW("not dequeuing from native window due to fatal error");
1400        return NULL;
1401    }
1402
1403    int fenceFd = -1;
1404    do {
1405        status_t err = mNativeWindow->dequeueBuffer(mNativeWindow.get(), &buf, &fenceFd);
1406        if (err != 0) {
1407            ALOGE("dequeueBuffer failed: %s(%d).", asString(err), err);
1408            return NULL;
1409        }
1410
1411        bool stale = false;
1412        for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
1413            i--;
1414            BufferInfo *info = &mBuffers[kPortIndexOutput].editItemAt(i);
1415
1416            if (info->mGraphicBuffer != NULL &&
1417                    info->mGraphicBuffer->handle == buf->handle) {
1418                // Since consumers can attach buffers to BufferQueues, it is possible
1419                // that a known yet stale buffer can return from a surface that we
1420                // once used.  We can simply ignore this as we have already dequeued
1421                // this buffer properly.  NOTE: this does not eliminate all cases,
1422                // e.g. it is possible that we have queued the valid buffer to the
1423                // NW, and a stale copy of the same buffer gets dequeued - which will
1424                // be treated as the valid buffer by ACodec.
1425                if (info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
1426                    ALOGI("dequeued stale buffer %p. discarding", buf);
1427                    stale = true;
1428                    break;
1429                }
1430
1431                ALOGV("dequeued buffer %p", info->mGraphicBuffer->getNativeBuffer());
1432                info->mStatus = BufferInfo::OWNED_BY_US;
1433                info->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow");
1434                updateRenderInfoForDequeuedBuffer(buf, fenceFd, info);
1435                return info;
1436            }
1437        }
1438
1439        // It is also possible to receive a previously unregistered buffer
1440        // in non-meta mode. These should be treated as stale buffers. The
1441        // same is possible in meta mode, in which case, it will be treated
1442        // as a normal buffer, which is not desirable.
1443        // TODO: fix this.
1444        if (!stale && (!storingMetadataInDecodedBuffers() || mLegacyAdaptiveExperiment)) {
1445            ALOGI("dequeued unrecognized (stale) buffer %p. discarding", buf);
1446            stale = true;
1447        }
1448        if (stale) {
1449            // TODO: detach stale buffer, but there is no API yet to do it.
1450            buf = NULL;
1451        }
1452    } while (buf == NULL);
1453
1454    // get oldest undequeued buffer
1455    BufferInfo *oldest = NULL;
1456    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
1457        i--;
1458        BufferInfo *info =
1459            &mBuffers[kPortIndexOutput].editItemAt(i);
1460        if (info->mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW &&
1461            (oldest == NULL ||
1462             // avoid potential issues from counter rolling over
1463             mDequeueCounter - info->mDequeuedAt >
1464                    mDequeueCounter - oldest->mDequeuedAt)) {
1465            oldest = info;
1466        }
1467    }
1468
1469    // it is impossible dequeue a buffer when there are no buffers with ANW
1470    CHECK(oldest != NULL);
1471    // it is impossible to dequeue an unknown buffer in non-meta mode, as the
1472    // while loop above does not complete
1473    CHECK(storingMetadataInDecodedBuffers());
1474
1475    // discard buffer in LRU info and replace with new buffer
1476    oldest->mGraphicBuffer = new GraphicBuffer(buf, false);
1477    oldest->mStatus = BufferInfo::OWNED_BY_US;
1478    oldest->setWriteFence(fenceFd, "dequeueBufferFromNativeWindow for oldest");
1479    mRenderTracker.untrackFrame(oldest->mRenderInfo);
1480    oldest->mRenderInfo = NULL;
1481
1482    mOMX->updateGraphicBufferInMeta(
1483            mNode, kPortIndexOutput, oldest->mGraphicBuffer,
1484            oldest->mBufferID);
1485
1486    if (mOutputMetadataType == kMetadataBufferTypeGrallocSource) {
1487        VideoGrallocMetadata *grallocMeta =
1488            reinterpret_cast<VideoGrallocMetadata *>(oldest->mData->base());
1489        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
1490                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
1491                mDequeueCounter - oldest->mDequeuedAt,
1492                (void *)(uintptr_t)grallocMeta->pHandle,
1493                oldest->mGraphicBuffer->handle, oldest->mData->base());
1494    } else if (mOutputMetadataType == kMetadataBufferTypeANWBuffer) {
1495        VideoNativeMetadata *nativeMeta =
1496            reinterpret_cast<VideoNativeMetadata *>(oldest->mData->base());
1497        ALOGV("replaced oldest buffer #%u with age %u (%p/%p stored in %p)",
1498                (unsigned)(oldest - &mBuffers[kPortIndexOutput][0]),
1499                mDequeueCounter - oldest->mDequeuedAt,
1500                (void *)(uintptr_t)nativeMeta->pBuffer,
1501                oldest->mGraphicBuffer->getNativeBuffer(), oldest->mData->base());
1502    }
1503
1504    updateRenderInfoForDequeuedBuffer(buf, fenceFd, oldest);
1505    return oldest;
1506}
1507
1508status_t ACodec::freeBuffersOnPort(OMX_U32 portIndex) {
1509    status_t err = OK;
1510    for (size_t i = mBuffers[portIndex].size(); i > 0;) {
1511        i--;
1512        status_t err2 = freeBuffer(portIndex, i);
1513        if (err == OK) {
1514            err = err2;
1515        }
1516    }
1517
1518    // clear mDealer even on an error
1519    mDealer[portIndex].clear();
1520    return err;
1521}
1522
1523status_t ACodec::freeOutputBuffersNotOwnedByComponent() {
1524    status_t err = OK;
1525    for (size_t i = mBuffers[kPortIndexOutput].size(); i > 0;) {
1526        i--;
1527        BufferInfo *info =
1528            &mBuffers[kPortIndexOutput].editItemAt(i);
1529
1530        // At this time some buffers may still be with the component
1531        // or being drained.
1532        if (info->mStatus != BufferInfo::OWNED_BY_COMPONENT &&
1533            info->mStatus != BufferInfo::OWNED_BY_DOWNSTREAM) {
1534            status_t err2 = freeBuffer(kPortIndexOutput, i);
1535            if (err == OK) {
1536                err = err2;
1537            }
1538        }
1539    }
1540
1541    return err;
1542}
1543
// Frees the buffer at index |i| on |portIndex|: returns it to the native
// window if we own it, releases it in the OMX component, closes any pending
// fence fd, and drops its bookkeeping entry.  The entry is removed even if
// the component's freeBuffer fails; an error is returned for buffers that
// were not in a freeable ownership state.
status_t ACodec::freeBuffer(OMX_U32 portIndex, size_t i) {
    BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
    status_t err = OK;

    // there should not be any fences in the metadata
    MetadataBufferType type =
        portIndex == kPortIndexOutput ? mOutputMetadataType : mInputMetadataType;
    if (type == kMetadataBufferTypeANWBuffer && info->mData != NULL
            && info->mData->size() >= sizeof(VideoNativeMetadata)) {
        int fenceFd = ((VideoNativeMetadata *)info->mData->data())->nFenceFd;
        if (fenceFd >= 0) {
            // diagnostic only: the fence in the metadata is not closed here
            ALOGW("unreleased fence (%d) in %s metadata buffer %zu",
                    fenceFd, portIndex == kPortIndexInput ? "input" : "output", i);
        }
    }

    switch (info->mStatus) {
        case BufferInfo::OWNED_BY_US:
            if (portIndex == kPortIndexOutput && mNativeWindow != NULL) {
                // hand the graphic buffer back to the window first;
                // failure here is non-fatal
                (void)cancelBufferToNativeWindow(info);
            }
            // fall through

        case BufferInfo::OWNED_BY_NATIVE_WINDOW:
            err = mOMX->freeBuffer(mNode, portIndex, info->mBufferID);
            break;

        default:
            ALOGE("trying to free buffer not owned by us or ANW (%d)", info->mStatus);
            err = FAILED_TRANSACTION;
            break;
    }

    // close any write fence we are still holding for this buffer
    if (info->mFenceFd >= 0) {
        ::close(info->mFenceFd);
    }

    if (portIndex == kPortIndexOutput) {
        // stop tracking render status for the departing output buffer
        mRenderTracker.untrackFrame(info->mRenderInfo, i);
        info->mRenderInfo = NULL;
    }

    // remove buffer even if mOMX->freeBuffer fails
    mBuffers[portIndex].removeAt(i);
    return err;
}
1590
1591ACodec::BufferInfo *ACodec::findBufferByID(
1592        uint32_t portIndex, IOMX::buffer_id bufferID, ssize_t *index) {
1593    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
1594        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
1595
1596        if (info->mBufferID == bufferID) {
1597            if (index != NULL) {
1598                *index = i;
1599            }
1600            return info;
1601        }
1602    }
1603
1604    ALOGE("Could not find buffer with ID %u", bufferID);
1605    return NULL;
1606}
1607
1608status_t ACodec::setComponentRole(
1609        bool isEncoder, const char *mime) {
1610    const char *role = getComponentRole(isEncoder, mime);
1611    if (role == NULL) {
1612        return BAD_VALUE;
1613    }
1614    status_t err = setComponentRole(mOMX, mNode, role);
1615    if (err != OK) {
1616        ALOGW("[%s] Failed to set standard component role '%s'.",
1617             mComponentName.c_str(), role);
1618    }
1619    return err;
1620}
1621
1622//static
1623const char *ACodec::getComponentRole(
1624        bool isEncoder, const char *mime) {
1625    struct MimeToRole {
1626        const char *mime;
1627        const char *decoderRole;
1628        const char *encoderRole;
1629    };
1630
1631    static const MimeToRole kMimeToRole[] = {
1632        { MEDIA_MIMETYPE_AUDIO_MPEG,
1633            "audio_decoder.mp3", "audio_encoder.mp3" },
1634        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
1635            "audio_decoder.mp1", "audio_encoder.mp1" },
1636        { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
1637            "audio_decoder.mp2", "audio_encoder.mp2" },
1638        { MEDIA_MIMETYPE_AUDIO_AMR_NB,
1639            "audio_decoder.amrnb", "audio_encoder.amrnb" },
1640        { MEDIA_MIMETYPE_AUDIO_AMR_WB,
1641            "audio_decoder.amrwb", "audio_encoder.amrwb" },
1642        { MEDIA_MIMETYPE_AUDIO_AAC,
1643            "audio_decoder.aac", "audio_encoder.aac" },
1644        { MEDIA_MIMETYPE_AUDIO_VORBIS,
1645            "audio_decoder.vorbis", "audio_encoder.vorbis" },
1646        { MEDIA_MIMETYPE_AUDIO_OPUS,
1647            "audio_decoder.opus", "audio_encoder.opus" },
1648        { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
1649            "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
1650        { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
1651            "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
1652        { MEDIA_MIMETYPE_VIDEO_AVC,
1653            "video_decoder.avc", "video_encoder.avc" },
1654        { MEDIA_MIMETYPE_VIDEO_HEVC,
1655            "video_decoder.hevc", "video_encoder.hevc" },
1656        { MEDIA_MIMETYPE_VIDEO_MPEG4,
1657            "video_decoder.mpeg4", "video_encoder.mpeg4" },
1658        { MEDIA_MIMETYPE_VIDEO_H263,
1659            "video_decoder.h263", "video_encoder.h263" },
1660        { MEDIA_MIMETYPE_VIDEO_VP8,
1661            "video_decoder.vp8", "video_encoder.vp8" },
1662        { MEDIA_MIMETYPE_VIDEO_VP9,
1663            "video_decoder.vp9", "video_encoder.vp9" },
1664        { MEDIA_MIMETYPE_AUDIO_RAW,
1665            "audio_decoder.raw", "audio_encoder.raw" },
1666        { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION,
1667            "video_decoder.dolby-vision", "video_encoder.dolby-vision" },
1668        { MEDIA_MIMETYPE_AUDIO_FLAC,
1669            "audio_decoder.flac", "audio_encoder.flac" },
1670        { MEDIA_MIMETYPE_AUDIO_MSGSM,
1671            "audio_decoder.gsm", "audio_encoder.gsm" },
1672        { MEDIA_MIMETYPE_VIDEO_MPEG2,
1673            "video_decoder.mpeg2", "video_encoder.mpeg2" },
1674        { MEDIA_MIMETYPE_AUDIO_AC3,
1675            "audio_decoder.ac3", "audio_encoder.ac3" },
1676        { MEDIA_MIMETYPE_AUDIO_EAC3,
1677            "audio_decoder.eac3", "audio_encoder.eac3" },
1678    };
1679
1680    static const size_t kNumMimeToRole =
1681        sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
1682
1683    size_t i;
1684    for (i = 0; i < kNumMimeToRole; ++i) {
1685        if (!strcasecmp(mime, kMimeToRole[i].mime)) {
1686            break;
1687        }
1688    }
1689
1690    if (i == kNumMimeToRole) {
1691        return NULL;
1692    }
1693
1694    return isEncoder ? kMimeToRole[i].encoderRole
1695                  : kMimeToRole[i].decoderRole;
1696}
1697
1698//static
1699status_t ACodec::setComponentRole(
1700        const sp<IOMX> &omx, IOMX::node_id node, const char *role) {
1701    OMX_PARAM_COMPONENTROLETYPE roleParams;
1702    InitOMXParams(&roleParams);
1703
1704    strncpy((char *)roleParams.cRole,
1705            role, OMX_MAX_STRINGNAME_SIZE - 1);
1706
1707    roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
1708
1709    return omx->setParameter(
1710            node, OMX_IndexParamStandardComponentRole,
1711            &roleParams, sizeof(roleParams));
1712}
1713
// Applies the configuration in |msg| (the format handed to
// MediaCodec::configure) for the given |mime| to the OMX component:
// sets the component role, negotiates input/output metadata-buffer modes,
// wires up the native window for hardware video decoders (including
// tunneled and adaptive playback), then dispatches to the per-codec setup
// helpers.  On success, mInputFormat/mOutputFormat hold the negotiated
// port formats.  Returns OK or the first unrecoverable error.
status_t ACodec::configureCodec(
        const char *mime, const sp<AMessage> &msg) {
    int32_t encoder;
    if (!msg->findInt32("encoder", &encoder)) {
        encoder = false;
    }

    sp<AMessage> inputFormat = new AMessage;
    sp<AMessage> outputFormat = new AMessage;
    mConfigFormat = msg;

    mIsEncoder = encoder;

    // Metadata modes are (re-)negotiated below; start from "invalid".
    mInputMetadataType = kMetadataBufferTypeInvalid;
    mOutputMetadataType = kMetadataBufferTypeInvalid;

    status_t err = setComponentRole(encoder /* isEncoder */, mime);

    if (err != OK) {
        return err;
    }

    int32_t bitRate = 0;
    // FLAC encoder doesn't need a bitrate, other encoders do
    if (encoder && strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)
            && !msg->findInt32("bitrate", &bitRate)) {
        return INVALID_OPERATION;
    }

    // propagate bitrate to the output so that the muxer has it
    if (encoder && msg->findInt32("bitrate", &bitRate)) {
        // Technically ISO spec says that 'bitrate' should be 0 for VBR even though it is the
        // average bitrate. We've been setting both bitrate and max-bitrate to this same value.
        outputFormat->setInt32("bitrate", bitRate);
        outputFormat->setInt32("max-bitrate", bitRate);
    }

    // Encoder input side: switch to metadata buffers if the client requested
    // it (used e.g. when frames come from a Surface rather than ByteBuffers).
    int32_t storeMeta;
    if (encoder
            && msg->findInt32("android._input-metadata-buffer-type", &storeMeta)
            && storeMeta != kMetadataBufferTypeInvalid) {
        mInputMetadataType = (MetadataBufferType)storeMeta;
        err = mOMX->storeMetaDataInBuffers(
                mNode, kPortIndexInput, OMX_TRUE, &mInputMetadataType);
        if (err != OK) {
            ALOGE("[%s] storeMetaDataInBuffers (input) failed w/ err %d",
                    mComponentName.c_str(), err);

            return err;
        } else if (storeMeta == kMetadataBufferTypeANWBuffer
                && mInputMetadataType == kMetadataBufferTypeGrallocSource) {
            // IOMX translates ANWBuffers to gralloc source already.
            mInputMetadataType = (MetadataBufferType)storeMeta;
        }

        // Surface consumers need to know whether the codec reads the buffers
        // with the CPU so the producer can pick a compatible format.
        uint32_t usageBits;
        if (mOMX->getParameter(
                mNode, (OMX_INDEXTYPE)OMX_IndexParamConsumerUsageBits,
                &usageBits, sizeof(usageBits)) == OK) {
            inputFormat->setInt32(
                    "using-sw-read-often", !!(usageBits & GRALLOC_USAGE_SW_READ_OFTEN));
        }
    }

    // Optional encoder feature: repeat SPS/PPS ahead of every IDR frame
    // (needed e.g. for streaming where receivers may join mid-stream).
    int32_t prependSPSPPS = 0;
    if (encoder
            && msg->findInt32("prepend-sps-pps-to-idr-frames", &prependSPSPPS)
            && prependSPSPPS != 0) {
        OMX_INDEXTYPE index;
        err = mOMX->getExtensionIndex(
                mNode,
                "OMX.google.android.index.prependSPSPPSToIDRFrames",
                &index);

        if (err == OK) {
            PrependSPSPPSToIDRFramesParams params;
            InitOMXParams(&params);
            params.bEnable = OMX_TRUE;

            err = mOMX->setParameter(
                    mNode, index, &params, sizeof(params));
        }

        if (err != OK) {
            ALOGE("Encoder could not be configured to emit SPS/PPS before "
                  "IDR frames. (err %d)", err);

            return err;
        }
    }

    // Only enable metadata mode on encoder output if encoder can prepend
    // sps/pps to idr frames, since in metadata mode the bitstream is in an
    // opaque handle, to which we don't have access.
    int32_t video = !strncasecmp(mime, "video/", 6);
    mIsVideo = video;
    if (encoder && video) {
        OMX_BOOL enable = (OMX_BOOL) (prependSPSPPS
            && msg->findInt32("android._store-metadata-in-buffers-output", &storeMeta)
            && storeMeta != 0);

        mOutputMetadataType = kMetadataBufferTypeNativeHandleSource;
        err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexOutput, enable, &mOutputMetadataType);
        if (err != OK) {
            // Non-fatal: encoder falls back to regular output buffers.
            ALOGE("[%s] storeMetaDataInBuffers (output) failed w/ err %d",
                mComponentName.c_str(), err);
        }

        // Cache Surface-input encoder tuning knobs; negative values mean
        // "not specified" and leave the component defaults in place.
        if (!msg->findInt64(
                    "repeat-previous-frame-after",
                    &mRepeatFrameDelayUs)) {
            mRepeatFrameDelayUs = -1ll;
        }

        if (!msg->findInt64("max-pts-gap-to-encoder", &mMaxPtsGapUs)) {
            mMaxPtsGapUs = -1ll;
        }

        if (!msg->findFloat("max-fps-to-encoder", &mMaxFps)) {
            mMaxFps = -1;
        }

        if (!msg->findInt64("time-lapse", &mTimePerCaptureUs)) {
            mTimePerCaptureUs = -1ll;
        }

        if (!msg->findInt32(
                    "create-input-buffers-suspended",
                    (int32_t*)&mCreateInputBuffersSuspended)) {
            mCreateInputBuffersSuspended = false;
        }
    }

    // NOTE: we only use native window for video decoders
    sp<RefBase> obj;
    bool haveNativeWindow = msg->findObject("native-window", &obj)
            && obj != NULL && video && !encoder;
    mUsingNativeWindow = haveNativeWindow;
    mLegacyAdaptiveExperiment = false;
    if (video && !encoder) {
        inputFormat->setInt32("adaptive-playback", false);

        // Protected (DRM) content must be rendered directly to a surface;
        // there is no CPU-readable path for it.
        int32_t usageProtected;
        if (msg->findInt32("protected", &usageProtected) && usageProtected) {
            if (!haveNativeWindow) {
                ALOGE("protected output buffers must be sent to an ANativeWindow");
                return PERMISSION_DENIED;
            }
            mFlags |= kFlagIsGrallocUsageProtected;
            mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
        }

        if (mFlags & kFlagIsSecure) {
            // use native_handles for secure input buffers
            err = mOMX->enableNativeBuffers(
                    mNode, kPortIndexInput, OMX_FALSE /* graphic */, OMX_TRUE);
            ALOGI_IF(err != OK, "falling back to non-native_handles");
            err = OK; // ignore error for now
        }
    }
    if (haveNativeWindow) {
        sp<ANativeWindow> nativeWindow =
            static_cast<ANativeWindow *>(static_cast<Surface *>(obj.get()));

        // START of temporary support for automatic FRC - THIS WILL BE REMOVED
        int32_t autoFrc;
        if (msg->findInt32("auto-frc", &autoFrc)) {
            bool enabled = autoFrc;
            OMX_CONFIG_BOOLEANTYPE config;
            InitOMXParams(&config);
            config.bEnabled = (OMX_BOOL)enabled;
            status_t temp = mOMX->setConfig(
                    mNode, (OMX_INDEXTYPE)OMX_IndexConfigAutoFramerateConversion,
                    &config, sizeof(config));
            if (temp == OK) {
                outputFormat->setInt32("auto-frc", enabled);
            } else if (enabled) {
                ALOGI("codec does not support requested auto-frc (err %d)", temp);
            }
        }
        // END of temporary support for automatic FRC

        // Tunneled playback: video goes straight from codec to display via
        // a sideband stream; buffers never surface to this process.
        int32_t tunneled;
        if (msg->findInt32("feature-tunneled-playback", &tunneled) &&
            tunneled != 0) {
            ALOGI("Configuring TUNNELED video playback.");
            mTunneled = true;

            int32_t audioHwSync = 0;
            if (!msg->findInt32("audio-hw-sync", &audioHwSync)) {
                ALOGW("No Audio HW Sync provided for video tunnel");
            }
            err = configureTunneledVideoPlayback(audioHwSync, nativeWindow);
            if (err != OK) {
                ALOGE("configureTunneledVideoPlayback(%d,%p) failed!",
                        audioHwSync, nativeWindow.get());
                return err;
            }

            int32_t maxWidth = 0, maxHeight = 0;
            if (msg->findInt32("max-width", &maxWidth) &&
                    msg->findInt32("max-height", &maxHeight)) {

                err = mOMX->prepareForAdaptivePlayback(
                        mNode, kPortIndexOutput, OMX_TRUE, maxWidth, maxHeight);
                if (err != OK) {
                    ALOGW("[%s] prepareForAdaptivePlayback failed w/ err %d",
                            mComponentName.c_str(), err);
                    // allow failure
                    err = OK;
                } else {
                    inputFormat->setInt32("max-width", maxWidth);
                    inputFormat->setInt32("max-height", maxHeight);
                    inputFormat->setInt32("adaptive-playback", true);
                }
            }
        } else {
            ALOGV("Configuring CPU controlled video playback.");
            mTunneled = false;

            // Explicity reset the sideband handle of the window for
            // non-tunneled video in case the window was previously used
            // for a tunneled video playback.
            err = native_window_set_sideband_stream(nativeWindow.get(), NULL);
            if (err != OK) {
                ALOGE("set_sideband_stream(NULL) failed! (err %d).", err);
                return err;
            }

            // Always try to enable dynamic output buffers on native surface
            mOutputMetadataType = kMetadataBufferTypeANWBuffer;
            err = mOMX->storeMetaDataInBuffers(
                    mNode, kPortIndexOutput, OMX_TRUE, &mOutputMetadataType);
            if (err != OK) {
                ALOGE("[%s] storeMetaDataInBuffers failed w/ err %d",
                        mComponentName.c_str(), err);

                // if adaptive playback has been requested, try JB fallback
                // NOTE: THIS FALLBACK MECHANISM WILL BE REMOVED DUE TO ITS
                // LARGE MEMORY REQUIREMENT

                // we will not do adaptive playback on software accessed
                // surfaces as they never had to respond to changes in the
                // crop window, and we don't trust that they will be able to.
                int usageBits = 0;
                bool canDoAdaptivePlayback;

                if (nativeWindow->query(
                        nativeWindow.get(),
                        NATIVE_WINDOW_CONSUMER_USAGE_BITS,
                        &usageBits) != OK) {
                    canDoAdaptivePlayback = false;
                } else {
                    canDoAdaptivePlayback =
                        (usageBits &
                                (GRALLOC_USAGE_SW_READ_MASK |
                                 GRALLOC_USAGE_SW_WRITE_MASK)) == 0;
                }

                int32_t maxWidth = 0, maxHeight = 0;
                if (canDoAdaptivePlayback &&
                        msg->findInt32("max-width", &maxWidth) &&
                        msg->findInt32("max-height", &maxHeight)) {
                    ALOGV("[%s] prepareForAdaptivePlayback(%dx%d)",
                            mComponentName.c_str(), maxWidth, maxHeight);

                    err = mOMX->prepareForAdaptivePlayback(
                            mNode, kPortIndexOutput, OMX_TRUE, maxWidth,
                            maxHeight);
                    ALOGW_IF(err != OK,
                            "[%s] prepareForAdaptivePlayback failed w/ err %d",
                            mComponentName.c_str(), err);

                    if (err == OK) {
                        inputFormat->setInt32("max-width", maxWidth);
                        inputFormat->setInt32("max-height", maxHeight);
                        inputFormat->setInt32("adaptive-playback", true);
                    }
                }
                // allow failure
                err = OK;
            } else {
                ALOGV("[%s] storeMetaDataInBuffers succeeded",
                        mComponentName.c_str());
                CHECK(storingMetadataInDecodedBuffers());
                mLegacyAdaptiveExperiment = ADebug::isExperimentEnabled(
                        "legacy-adaptive", !msg->contains("no-experiments"));

                inputFormat->setInt32("adaptive-playback", true);
            }

            int32_t push;
            if (msg->findInt32("push-blank-buffers-on-shutdown", &push)
                    && push != 0) {
                mFlags |= kFlagPushBlankBuffersToNativeWindowOnShutdown;
            }
        }

        int32_t rotationDegrees;
        if (msg->findInt32("rotation-degrees", &rotationDegrees)) {
            mRotationDegrees = rotationDegrees;
        } else {
            mRotationDegrees = 0;
        }
    }

    AudioEncoding pcmEncoding = kAudioEncodingPcm16bit;
    (void)msg->findInt32("pcm-encoding", (int32_t*)&pcmEncoding);
    // invalid encodings will default to PCM-16bit in setupRawAudioFormat.

    // Per-codec setup: video first, then one branch per audio mime type.
    if (video) {
        // determine need for software renderer
        bool usingSwRenderer = false;
        if (haveNativeWindow && mComponentName.startsWith("OMX.google.")) {
            usingSwRenderer = true;
            haveNativeWindow = false;
        }

        if (encoder) {
            err = setupVideoEncoder(mime, msg, outputFormat, inputFormat);
        } else {
            err = setupVideoDecoder(mime, msg, haveNativeWindow, usingSwRenderer, outputFormat);
        }

        if (err != OK) {
            return err;
        }

        if (haveNativeWindow) {
            mNativeWindow = static_cast<Surface *>(obj.get());
        }

        // initialize native window now to get actual output format
        // TODO: this is needed for some encoders even though they don't use native window
        err = initNativeWindow();
        if (err != OK) {
            return err;
        }

        // fallback for devices that do not handle flex-YUV for native buffers
        if (haveNativeWindow) {
            int32_t requestedColorFormat = OMX_COLOR_FormatUnused;
            if (msg->findInt32("color-format", &requestedColorFormat) &&
                    requestedColorFormat == OMX_COLOR_FormatYUV420Flexible) {
                status_t err = getPortFormat(kPortIndexOutput, outputFormat);
                if (err != OK) {
                    return err;
                }
                int32_t colorFormat = OMX_COLOR_FormatUnused;
                OMX_U32 flexibleEquivalent = OMX_COLOR_FormatUnused;
                if (!outputFormat->findInt32("color-format", &colorFormat)) {
                    ALOGE("ouptut port did not have a color format (wrong domain?)");
                    return BAD_VALUE;
                }
                ALOGD("[%s] Requested output format %#x and got %#x.",
                        mComponentName.c_str(), requestedColorFormat, colorFormat);
                if (!isFlexibleColorFormat(
                                mOMX, mNode, colorFormat, haveNativeWindow, &flexibleEquivalent)
                        || flexibleEquivalent != (OMX_U32)requestedColorFormat) {
                    // device did not handle flex-YUV request for native window, fall back
                    // to SW renderer
                    ALOGI("[%s] Falling back to software renderer", mComponentName.c_str());
                    mNativeWindow.clear();
                    mNativeWindowUsageBits = 0;
                    haveNativeWindow = false;
                    usingSwRenderer = true;
                    if (storingMetadataInDecodedBuffers()) {
                        err = mOMX->storeMetaDataInBuffers(
                                mNode, kPortIndexOutput, OMX_FALSE, &mOutputMetadataType);
                        mOutputMetadataType = kMetadataBufferTypeInvalid; // just in case
                        // TODO: implement adaptive-playback support for bytebuffer mode.
                        // This is done by SW codecs, but most HW codecs don't support it.
                        inputFormat->setInt32("adaptive-playback", false);
                    }
                    if (err == OK) {
                        err = mOMX->enableNativeBuffers(
                                mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
                    }
                    if (mFlags & kFlagIsGrallocUsageProtected) {
                        // fallback is not supported for protected playback
                        err = PERMISSION_DENIED;
                    } else if (err == OK) {
                        // reconfigure decoder for ByteBuffer output
                        err = setupVideoDecoder(
                                mime, msg, haveNativeWindow, usingSwRenderer, outputFormat);
                    }
                }
            }
        }

        if (usingSwRenderer) {
            outputFormat->setInt32("using-sw-renderer", 1);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_MPEG)) {
        int32_t numChannels, sampleRate;
        if (!msg->findInt32("channel-count", &numChannels)
                || !msg->findInt32("sample-rate", &sampleRate)) {
            // Since we did not always check for these, leave them optional
            // and have the decoder figure it all out.
            err = OK;
        } else {
            err = setupRawAudioFormat(
                    encoder ? kPortIndexInput : kPortIndexOutput,
                    sampleRate,
                    numChannels);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AAC)) {
        int32_t numChannels, sampleRate;
        if (!msg->findInt32("channel-count", &numChannels)
                || !msg->findInt32("sample-rate", &sampleRate)) {
            err = INVALID_OPERATION;
        } else {
            // Optional AAC keys; -1 / defaults mean "component decides".
            int32_t isADTS, aacProfile;
            int32_t sbrMode;
            int32_t maxOutputChannelCount;
            int32_t pcmLimiterEnable;
            drcParams_t drc;
            if (!msg->findInt32("is-adts", &isADTS)) {
                isADTS = 0;
            }
            if (!msg->findInt32("aac-profile", &aacProfile)) {
                aacProfile = OMX_AUDIO_AACObjectNull;
            }
            if (!msg->findInt32("aac-sbr-mode", &sbrMode)) {
                sbrMode = -1;
            }

            if (!msg->findInt32("aac-max-output-channel_count", &maxOutputChannelCount)) {
                maxOutputChannelCount = -1;
            }
            if (!msg->findInt32("aac-pcm-limiter-enable", &pcmLimiterEnable)) {
                // value is unknown
                pcmLimiterEnable = -1;
            }
            if (!msg->findInt32("aac-encoded-target-level", &drc.encodedTargetLevel)) {
                // value is unknown
                drc.encodedTargetLevel = -1;
            }
            if (!msg->findInt32("aac-drc-cut-level", &drc.drcCut)) {
                // value is unknown
                drc.drcCut = -1;
            }
            if (!msg->findInt32("aac-drc-boost-level", &drc.drcBoost)) {
                // value is unknown
                drc.drcBoost = -1;
            }
            if (!msg->findInt32("aac-drc-heavy-compression", &drc.heavyCompression)) {
                // value is unknown
                drc.heavyCompression = -1;
            }
            if (!msg->findInt32("aac-target-ref-level", &drc.targetRefLevel)) {
                // value is unknown
                drc.targetRefLevel = -1;
            }

            err = setupAACCodec(
                    encoder, numChannels, sampleRate, bitRate, aacProfile,
                    isADTS != 0, sbrMode, maxOutputChannelCount, drc,
                    pcmLimiterEnable);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_NB)) {
        err = setupAMRCodec(encoder, false /* isWAMR */, bitRate);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AMR_WB)) {
        err = setupAMRCodec(encoder, true /* isWAMR */, bitRate);
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_ALAW)
            || !strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_G711_MLAW)) {
        // These are PCM-like formats with a fixed sample rate but
        // a variable number of channels.

        int32_t numChannels;
        if (!msg->findInt32("channel-count", &numChannels)) {
            err = INVALID_OPERATION;
        } else {
            int32_t sampleRate;
            if (!msg->findInt32("sample-rate", &sampleRate)) {
                sampleRate = 8000;
            }
            err = setupG711Codec(encoder, sampleRate, numChannels);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_FLAC)) {
        int32_t numChannels = 0, sampleRate = 0, compressionLevel = -1;
        if (encoder &&
                (!msg->findInt32("channel-count", &numChannels)
                        || !msg->findInt32("sample-rate", &sampleRate))) {
            ALOGE("missing channel count or sample rate for FLAC encoder");
            err = INVALID_OPERATION;
        } else {
            if (encoder) {
                // Clamp the requested compression level to the valid
                // FLAC range [0..8].
                if (!msg->findInt32(
                            "complexity", &compressionLevel) &&
                    !msg->findInt32(
                            "flac-compression-level", &compressionLevel)) {
                    compressionLevel = 5; // default FLAC compression level
                } else if (compressionLevel < 0) {
                    ALOGW("compression level %d outside [0..8] range, "
                          "using 0",
                          compressionLevel);
                    compressionLevel = 0;
                } else if (compressionLevel > 8) {
                    ALOGW("compression level %d outside [0..8] range, "
                          "using 8",
                          compressionLevel);
                    compressionLevel = 8;
                }
            }
            err = setupFlacCodec(
                    encoder, numChannels, sampleRate, compressionLevel);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_RAW)) {
        int32_t numChannels, sampleRate;
        if (encoder
                || !msg->findInt32("channel-count", &numChannels)
                || !msg->findInt32("sample-rate", &sampleRate)) {
            err = INVALID_OPERATION;
        } else {
            err = setupRawAudioFormat(kPortIndexInput, sampleRate, numChannels, pcmEncoding);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_AC3)) {
        int32_t numChannels;
        int32_t sampleRate;
        if (!msg->findInt32("channel-count", &numChannels)
                || !msg->findInt32("sample-rate", &sampleRate)) {
            err = INVALID_OPERATION;
        } else {
            err = setupAC3Codec(encoder, numChannels, sampleRate);
        }
    } else if (!strcasecmp(mime, MEDIA_MIMETYPE_AUDIO_EAC3)) {
        int32_t numChannels;
        int32_t sampleRate;
        if (!msg->findInt32("channel-count", &numChannels)
                || !msg->findInt32("sample-rate", &sampleRate)) {
            err = INVALID_OPERATION;
        } else {
            err = setupEAC3Codec(encoder, numChannels, sampleRate);
        }
    }

    if (err != OK) {
        return err;
    }

    // Gapless-playback trimming parameters (samples to drop at the ends).
    if (!msg->findInt32("encoder-delay", &mEncoderDelay)) {
        mEncoderDelay = 0;
    }

    if (!msg->findInt32("encoder-padding", &mEncoderPadding)) {
        mEncoderPadding = 0;
    }

    if (msg->findInt32("channel-mask", &mChannelMask)) {
        mChannelMaskPresent = true;
    } else {
        mChannelMaskPresent = false;
    }

    int32_t maxInputSize;
    if (msg->findInt32("max-input-size", &maxInputSize)) {
        err = setMinBufferSize(kPortIndexInput, (size_t)maxInputSize);
    } else if (!strcmp("OMX.Nvidia.aac.decoder", mComponentName.c_str())) {
        // Known component that needs a larger minimum input buffer.
        err = setMinBufferSize(kPortIndexInput, 8192);  // XXX
    }

    int32_t priority;
    if (msg->findInt32("priority", &priority)) {
        err = setPriority(priority);
    }

    // "operating-rate" may come in as either a float or an int32.
    int32_t rateInt = -1;
    float rateFloat = -1;
    if (!msg->findFloat("operating-rate", &rateFloat)) {
        msg->findInt32("operating-rate", &rateInt);
        rateFloat = (float)rateInt;  // 16MHz (FLINTMAX) is OK for upper bound.
    }
    if (rateFloat > 0) {
        err = setOperatingRate(rateFloat, video);
    }

    // NOTE: both mBaseOutputFormat and mOutputFormat are outputFormat to signal first frame.
    mBaseOutputFormat = outputFormat;
    // trigger a kWhatOutputFormatChanged msg on first buffer
    mLastOutputFormat.clear();

    err = getPortFormat(kPortIndexInput, inputFormat);
    if (err == OK) {
        err = getPortFormat(kPortIndexOutput, outputFormat);
        if (err == OK) {
            mInputFormat = inputFormat;
            mOutputFormat = outputFormat;
        }
    }

    // create data converters if needed
    // (bridges the client's requested PCM encoding and the codec's native one)
    if (!video && err == OK) {
        AudioEncoding codecPcmEncoding = kAudioEncodingPcm16bit;
        if (encoder) {
            (void)mInputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexInput] = AudioConverter::Create(pcmEncoding, codecPcmEncoding);
            if (mConverter[kPortIndexInput] != NULL) {
                mInputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        } else {
            (void)mOutputFormat->findInt32("pcm-encoding", (int32_t*)&codecPcmEncoding);
            mConverter[kPortIndexOutput] = AudioConverter::Create(codecPcmEncoding, pcmEncoding);
            if (mConverter[kPortIndexOutput] != NULL) {
                mOutputFormat->setInt32("pcm-encoding", pcmEncoding);
            }
        }
    }

    return err;
}
2324
2325status_t ACodec::setPriority(int32_t priority) {
2326    if (priority < 0) {
2327        return BAD_VALUE;
2328    }
2329    OMX_PARAM_U32TYPE config;
2330    InitOMXParams(&config);
2331    config.nU32 = (OMX_U32)priority;
2332    status_t temp = mOMX->setConfig(
2333            mNode, (OMX_INDEXTYPE)OMX_IndexConfigPriority,
2334            &config, sizeof(config));
2335    if (temp != OK) {
2336        ALOGI("codec does not support config priority (err %d)", temp);
2337    }
2338    return OK;
2339}
2340
2341status_t ACodec::setOperatingRate(float rateFloat, bool isVideo) {
2342    if (rateFloat < 0) {
2343        return BAD_VALUE;
2344    }
2345    OMX_U32 rate;
2346    if (isVideo) {
2347        if (rateFloat > 65535) {
2348            return BAD_VALUE;
2349        }
2350        rate = (OMX_U32)(rateFloat * 65536.0f + 0.5f);
2351    } else {
2352        if (rateFloat > UINT_MAX) {
2353            return BAD_VALUE;
2354        }
2355        rate = (OMX_U32)(rateFloat);
2356    }
2357    OMX_PARAM_U32TYPE config;
2358    InitOMXParams(&config);
2359    config.nU32 = rate;
2360    status_t err = mOMX->setConfig(
2361            mNode, (OMX_INDEXTYPE)OMX_IndexConfigOperatingRate,
2362            &config, sizeof(config));
2363    if (err != OK) {
2364        ALOGI("codec does not support config operating rate (err %d)", err);
2365    }
2366    return OK;
2367}
2368
// Queries the encoder's intra-refresh period in frames. Tries the
// Android-specific config first; if that is unsupported, falls back to the
// standard OMX cyclic intra-refresh parameter and converts
// macroblocks-per-frame back into a frame period using the output port's
// dimensions. Reports 0 when intra refresh is disabled or unsupported.
status_t ACodec::getIntraRefreshPeriod(uint32_t *intraRefreshPeriod) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    status_t err = mOMX->getConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        // Android config reports the period directly, no conversion needed.
        *intraRefreshPeriod = params.nRefreshPeriod;
        return OK;
    }

    // Fallback to query through standard OMX index.
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoIntraRefresh, &refreshParams, sizeof(refreshParams));
    if (err != OK || refreshParams.nCirMBs == 0) {
        // Unsupported or disabled: report "no intra refresh" but succeed.
        *intraRefreshPeriod = 0;
        return OK;
    }

    // Calculate period based on width and height
    uint32_t width, height;
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
    def.nPortIndex = kPortIndexOutput;
    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        *intraRefreshPeriod = 0;
        return err;
    }
    width = video_def->nFrameWidth;
    height = video_def->nFrameHeight;
    // Use H.264/AVC MacroBlock size 16x16
    // period (frames) = total MBs per frame / MBs refreshed per frame.
    *intraRefreshPeriod = divUp((divUp(width, 16u) * divUp(height, 16u)), refreshParams.nCirMBs);

    return OK;
}
2411
// Sets the encoder's intra-refresh period (in frames; 0 disables it). Tries
// the Android-specific config first, which can be changed at any time. If the
// component does not support it, falls back to the standard cyclic
// intra-refresh *parameter* -- but parameters may only be set during
// configure, hence the |inConfigure| gate.
status_t ACodec::setIntraRefreshPeriod(uint32_t intraRefreshPeriod, bool inConfigure) {
    OMX_VIDEO_CONFIG_ANDROID_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;
    params.nRefreshPeriod = intraRefreshPeriod;
    status_t err = mOMX->setConfig(
            mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidIntraRefresh, &params, sizeof(params));
    if (err == OK) {
        return OK;
    }

    // Only in configure state, a component could invoke setParameter.
    if (!inConfigure) {
        return INVALID_OPERATION;
    } else {
        ALOGI("[%s] try falling back to Cyclic", mComponentName.c_str());
    }

    OMX_VIDEO_PARAM_INTRAREFRESHTYPE refreshParams;
    InitOMXParams(&refreshParams);
    refreshParams.nPortIndex = kPortIndexOutput;
    refreshParams.eRefreshMode = OMX_VIDEO_IntraRefreshCyclic;

    if (intraRefreshPeriod == 0) {
        // 0 means disable intra refresh.
        refreshParams.nCirMBs = 0;
    } else {
        // Calculate macroblocks that need to be intra coded base on width and height
        uint32_t width, height;
        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
        def.nPortIndex = kPortIndexOutput;
        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
        if (err != OK) {
            return err;
        }
        width = video_def->nFrameWidth;
        height = video_def->nFrameHeight;
        // Use H.264/AVC MacroBlock size 16x16
        // MBs refreshed per frame = total MBs per frame / period.
        refreshParams.nCirMBs = divUp((divUp(width, 16u) * divUp(height, 16u)), intraRefreshPeriod);
    }

    err = mOMX->setParameter(mNode, OMX_IndexParamVideoIntraRefresh,
                             &refreshParams, sizeof(refreshParams));
    if (err != OK) {
        return err;
    }

    return OK;
}
2464
// Configures temporal-layered encoding from the "ts-schema" format key.
// Supported schemas: "webrtc.vp8.N-layer" (WebRTC pattern) and
// "android.generic.N" / "android.generic.N+M" (N P-layers plus M B-layers).
// During configure (|inConfigure|) the layering is set as a parameter;
// afterwards as a runtime config. On success the actually-applied schema is
// written back into |outputFormat| (duplicating mOutputFormat first if they
// alias, so an output-format-changed event fires).
status_t ACodec::configureTemporalLayers(
        const sp<AMessage> &msg, bool inConfigure, sp<AMessage> &outputFormat) {
    if (!mIsVideo || !mIsEncoder) {
        return INVALID_OPERATION;
    }

    AString tsSchema;
    if (!msg->findString("ts-schema", &tsSchema)) {
        // No schema requested; nothing to do.
        return OK;
    }

    unsigned int numLayers = 0;
    unsigned int numBLayers = 0;
    int tags;
    char dummy;  // catches trailing characters so e.g. "2-layerx" is rejected
    OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE pattern =
        OMX_VIDEO_AndroidTemporalLayeringPatternNone;
    // sscanf == 1 means exactly "webrtc.vp8.<N>-layer" with nothing after it
    // (a match on the trailing %c would have returned 2).
    if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
            && numLayers > 0) {
        pattern = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
    } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
                    &numLayers, &dummy, &numBLayers, &dummy))
            // tags == 1: plain "android.generic.N"; tags == 3 with dummy == '+':
            // "android.generic.N+M" (no trailing junk, else tags would be 4).
            && (tags == 1 || (tags == 3 && dummy == '+'))
            && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
        numLayers += numBLayers;  // numLayers becomes the total layer count
        pattern = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
    } else {
        ALOGI("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
        return BAD_VALUE;
    }

    OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layerParams;
    InitOMXParams(&layerParams);
    layerParams.nPortIndex = kPortIndexOutput;

    // Query component capabilities (supported patterns and layer count caps).
    status_t err = mOMX->getParameter(
        mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
        &layerParams, sizeof(layerParams));

    if (err != OK) {
        return err;
    } else if (!(layerParams.eSupportedPatterns & pattern)) {
        return BAD_VALUE;
    }

    // Clamp the request to what the component advertises.
    numLayers = min(numLayers, layerParams.nLayerCountMax);
    numBLayers = min(numBLayers, layerParams.nBLayerCountMax);

    if (!inConfigure) {
        // Running state: apply via setConfig.
        OMX_VIDEO_CONFIG_ANDROID_TEMPORALLAYERINGTYPE layerConfig;
        InitOMXParams(&layerConfig);
        layerConfig.nPortIndex = kPortIndexOutput;
        layerConfig.ePattern = pattern;
        layerConfig.nPLayerCountActual = numLayers - numBLayers;
        layerConfig.nBLayerCountActual = numBLayers;
        layerConfig.bBitrateRatiosSpecified = OMX_FALSE;

        err = mOMX->setConfig(
                mNode, (OMX_INDEXTYPE)OMX_IndexConfigAndroidVideoTemporalLayering,
                &layerConfig, sizeof(layerConfig));
    } else {
        // Configure state: apply via setParameter.
        layerParams.ePattern = pattern;
        layerParams.nPLayerCountActual = numLayers - numBLayers;
        layerParams.nBLayerCountActual = numBLayers;
        layerParams.bBitrateRatiosSpecified = OMX_FALSE;

        err = mOMX->setParameter(
                mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
                &layerParams, sizeof(layerParams));
    }

    // Canonical schema string for what we actually asked for (post-clamping).
    AString configSchema;
    if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid) {
        configSchema = AStringPrintf("android.generic.%u+%u", numLayers - numBLayers, numBLayers);
    } else if (pattern == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) {
        configSchema = AStringPrintf("webrtc.vp8.%u", numLayers);
    }

    if (err != OK) {
        ALOGW("Failed to set temporal layers to %s (requested %s)",
                configSchema.c_str(), tsSchema.c_str());
        return err;
    }

    // Read back what the component actually accepted, for logging.
    err = mOMX->getParameter(
            mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
            &layerParams, sizeof(layerParams));

    if (err == OK) {
        ALOGD("Temporal layers requested:%s configured:%s got:%s(%u: P=%u, B=%u)",
                tsSchema.c_str(), configSchema.c_str(),
                asString(layerParams.ePattern), layerParams.ePattern,
                layerParams.nPLayerCountActual, layerParams.nBLayerCountActual);

        if (outputFormat.get() == mOutputFormat.get()) {
            mOutputFormat = mOutputFormat->dup(); // trigger an output format change event
        }
        // assume we got what we configured
        outputFormat->setString("ts-schema", configSchema);
    }
    return err;
}
2567
2568status_t ACodec::setMinBufferSize(OMX_U32 portIndex, size_t size) {
2569    OMX_PARAM_PORTDEFINITIONTYPE def;
2570    InitOMXParams(&def);
2571    def.nPortIndex = portIndex;
2572
2573    status_t err = mOMX->getParameter(
2574            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2575
2576    if (err != OK) {
2577        return err;
2578    }
2579
2580    if (def.nBufferSize >= size) {
2581        return OK;
2582    }
2583
2584    def.nBufferSize = size;
2585
2586    err = mOMX->setParameter(
2587            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2588
2589    if (err != OK) {
2590        return err;
2591    }
2592
2593    err = mOMX->getParameter(
2594            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2595
2596    if (err != OK) {
2597        return err;
2598    }
2599
2600    if (def.nBufferSize < size) {
2601        ALOGE("failed to set min buffer size to %zu (is still %u)", size, def.nBufferSize);
2602        return FAILED_TRANSACTION;
2603    }
2604
2605    return OK;
2606}
2607
2608status_t ACodec::selectAudioPortFormat(
2609        OMX_U32 portIndex, OMX_AUDIO_CODINGTYPE desiredFormat) {
2610    OMX_AUDIO_PARAM_PORTFORMATTYPE format;
2611    InitOMXParams(&format);
2612
2613    format.nPortIndex = portIndex;
2614    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
2615        format.nIndex = index;
2616        status_t err = mOMX->getParameter(
2617                mNode, OMX_IndexParamAudioPortFormat,
2618                &format, sizeof(format));
2619
2620        if (err != OK) {
2621            return err;
2622        }
2623
2624        if (format.eEncoding == desiredFormat) {
2625            break;
2626        }
2627
2628        if (index == kMaxIndicesToCheck) {
2629            ALOGW("[%s] stopping checking formats after %u: %s(%x)",
2630                    mComponentName.c_str(), index,
2631                    asString(format.eEncoding), format.eEncoding);
2632            return ERROR_UNSUPPORTED;
2633        }
2634    }
2635
2636    return mOMX->setParameter(
2637            mNode, OMX_IndexParamAudioPortFormat, &format, sizeof(format));
2638}
2639
// Configures an AAC encoder or decoder.
//  - Encoder path: raw PCM on the input port, AAC coding + profile (channel
//    mode, bit rate, SBR tools) on the output port. ADTS encoding is not
//    supported (rejected up front).
//  - Decoder path: raw PCM on the output port, AAC profile (stream format
//    MP4FF vs MP4ADTS) on the input port, plus optional Android presentation
//    parameters (DRC, max output channels, PCM limiter) that are applied
//    best-effort.
status_t ACodec::setupAACCodec(
        bool encoder, int32_t numChannels, int32_t sampleRate,
        int32_t bitRate, int32_t aacProfile, bool isADTS, int32_t sbrMode,
        int32_t maxOutputChannelCount, const drcParams_t& drc,
        int32_t pcmLimiterEnable) {
    // ADTS output is a decode-only concept here; reject it for encoders.
    if (encoder && isADTS) {
        return -EINVAL;
    }

    // Raw PCM goes on the encoder's input port or the decoder's output port.
    status_t err = setupRawAudioFormat(
            encoder ? kPortIndexInput : kPortIndexOutput,
            sampleRate,
            numChannels);

    if (err != OK) {
        return err;
    }

    if (encoder) {
        err = selectAudioPortFormat(kPortIndexOutput, OMX_AUDIO_CodingAAC);

        if (err != OK) {
            return err;
        }

        OMX_PARAM_PORTDEFINITIONTYPE def;
        InitOMXParams(&def);
        def.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        def.format.audio.bFlagErrorConcealment = OMX_TRUE;
        def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;

        err = mOMX->setParameter(
                mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

        if (err != OK) {
            return err;
        }

        OMX_AUDIO_PARAM_AACPROFILETYPE profile;
        InitOMXParams(&profile);
        profile.nPortIndex = kPortIndexOutput;

        err = mOMX->getParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        profile.nChannels = numChannels;

        profile.eChannelMode =
            (numChannels == 1)
                ? OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo;

        profile.nSampleRate = sampleRate;
        profile.nBitRate = bitRate;
        profile.nAudioBandWidth = 0;
        profile.nFrameLength = 0;
        profile.nAACtools = OMX_AUDIO_AACToolAll;
        profile.nAACERtools = OMX_AUDIO_AACERNone;
        profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
        profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
        // Translate the requested SBR (spectral band replication) mode into
        // the Android single-rate/dual-rate SBR tool bits.
        switch (sbrMode) {
        case 0:
            // disable sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 1:
            // enable single-rate sbr
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case 2:
            // enable dual-rate sbr
            profile.nAACtools &= ~OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        case -1:
            // enable both modes -> the codec will decide which mode should be used
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidSSBR;
            profile.nAACtools |= OMX_AUDIO_AACToolAndroidDSBR;
            break;
        default:
            // unsupported sbr mode
            return BAD_VALUE;
        }


        err = mOMX->setParameter(
                mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

        if (err != OK) {
            return err;
        }

        return err;
    }

    // Decoder path: describe the incoming AAC stream on the input port.
    OMX_AUDIO_PARAM_AACPROFILETYPE profile;
    InitOMXParams(&profile);
    profile.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));

    if (err != OK) {
        return err;
    }

    profile.nChannels = numChannels;
    profile.nSampleRate = sampleRate;

    profile.eAACStreamFormat =
        isADTS
            ? OMX_AUDIO_AACStreamFormatMP4ADTS
            : OMX_AUDIO_AACStreamFormatMP4FF;

    // Presentation (DRC/downmix/limiter) parameters are optional extras.
    OMX_AUDIO_PARAM_ANDROID_AACPRESENTATIONTYPE presentation;
    InitOMXParams(&presentation);
    presentation.nMaxOutputChannels = maxOutputChannelCount;
    presentation.nDrcCut = drc.drcCut;
    presentation.nDrcBoost = drc.drcBoost;
    presentation.nHeavyCompression = drc.heavyCompression;
    presentation.nTargetReferenceLevel = drc.targetRefLevel;
    presentation.nEncodedTargetLevel = drc.encodedTargetLevel;
    presentation.nPCMLimiterEnable = pcmLimiterEnable;

    status_t res = mOMX->setParameter(mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
    if (res == OK) {
        // optional parameters, will not cause configuration failure
        mOMX->setParameter(mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAacPresentation,
                &presentation, sizeof(presentation));
    } else {
        ALOGW("did not set AudioAndroidAacPresentation due to error %d when setting AudioAac", res);
    }
    return res;
}
2787
2788status_t ACodec::setupAC3Codec(
2789        bool encoder, int32_t numChannels, int32_t sampleRate) {
2790    status_t err = setupRawAudioFormat(
2791            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);
2792
2793    if (err != OK) {
2794        return err;
2795    }
2796
2797    if (encoder) {
2798        ALOGW("AC3 encoding is not supported.");
2799        return INVALID_OPERATION;
2800    }
2801
2802    OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
2803    InitOMXParams(&def);
2804    def.nPortIndex = kPortIndexInput;
2805
2806    err = mOMX->getParameter(
2807            mNode,
2808            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
2809            &def,
2810            sizeof(def));
2811
2812    if (err != OK) {
2813        return err;
2814    }
2815
2816    def.nChannels = numChannels;
2817    def.nSampleRate = sampleRate;
2818
2819    return mOMX->setParameter(
2820            mNode,
2821            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
2822            &def,
2823            sizeof(def));
2824}
2825
2826status_t ACodec::setupEAC3Codec(
2827        bool encoder, int32_t numChannels, int32_t sampleRate) {
2828    status_t err = setupRawAudioFormat(
2829            encoder ? kPortIndexInput : kPortIndexOutput, sampleRate, numChannels);
2830
2831    if (err != OK) {
2832        return err;
2833    }
2834
2835    if (encoder) {
2836        ALOGW("EAC3 encoding is not supported.");
2837        return INVALID_OPERATION;
2838    }
2839
2840    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE def;
2841    InitOMXParams(&def);
2842    def.nPortIndex = kPortIndexInput;
2843
2844    err = mOMX->getParameter(
2845            mNode,
2846            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
2847            &def,
2848            sizeof(def));
2849
2850    if (err != OK) {
2851        return err;
2852    }
2853
2854    def.nChannels = numChannels;
2855    def.nSampleRate = sampleRate;
2856
2857    return mOMX->setParameter(
2858            mNode,
2859            (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
2860            &def,
2861            sizeof(def));
2862}
2863
2864static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(
2865        bool isAMRWB, int32_t bps) {
2866    if (isAMRWB) {
2867        if (bps <= 6600) {
2868            return OMX_AUDIO_AMRBandModeWB0;
2869        } else if (bps <= 8850) {
2870            return OMX_AUDIO_AMRBandModeWB1;
2871        } else if (bps <= 12650) {
2872            return OMX_AUDIO_AMRBandModeWB2;
2873        } else if (bps <= 14250) {
2874            return OMX_AUDIO_AMRBandModeWB3;
2875        } else if (bps <= 15850) {
2876            return OMX_AUDIO_AMRBandModeWB4;
2877        } else if (bps <= 18250) {
2878            return OMX_AUDIO_AMRBandModeWB5;
2879        } else if (bps <= 19850) {
2880            return OMX_AUDIO_AMRBandModeWB6;
2881        } else if (bps <= 23050) {
2882            return OMX_AUDIO_AMRBandModeWB7;
2883        }
2884
2885        // 23850 bps
2886        return OMX_AUDIO_AMRBandModeWB8;
2887    } else {  // AMRNB
2888        if (bps <= 4750) {
2889            return OMX_AUDIO_AMRBandModeNB0;
2890        } else if (bps <= 5150) {
2891            return OMX_AUDIO_AMRBandModeNB1;
2892        } else if (bps <= 5900) {
2893            return OMX_AUDIO_AMRBandModeNB2;
2894        } else if (bps <= 6700) {
2895            return OMX_AUDIO_AMRBandModeNB3;
2896        } else if (bps <= 7400) {
2897            return OMX_AUDIO_AMRBandModeNB4;
2898        } else if (bps <= 7950) {
2899            return OMX_AUDIO_AMRBandModeNB5;
2900        } else if (bps <= 10200) {
2901            return OMX_AUDIO_AMRBandModeNB6;
2902        }
2903
2904        // 12200 bps
2905        return OMX_AUDIO_AMRBandModeNB7;
2906    }
2907}
2908
2909status_t ACodec::setupAMRCodec(bool encoder, bool isWAMR, int32_t bitrate) {
2910    OMX_AUDIO_PARAM_AMRTYPE def;
2911    InitOMXParams(&def);
2912    def.nPortIndex = encoder ? kPortIndexOutput : kPortIndexInput;
2913
2914    status_t err =
2915        mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
2916
2917    if (err != OK) {
2918        return err;
2919    }
2920
2921    def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
2922    def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitrate);
2923
2924    err = mOMX->setParameter(
2925            mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
2926
2927    if (err != OK) {
2928        return err;
2929    }
2930
2931    return setupRawAudioFormat(
2932            encoder ? kPortIndexInput : kPortIndexOutput,
2933            isWAMR ? 16000 : 8000 /* sampleRate */,
2934            1 /* numChannels */);
2935}
2936
2937status_t ACodec::setupG711Codec(bool encoder, int32_t sampleRate, int32_t numChannels) {
2938    if (encoder) {
2939        return INVALID_OPERATION;
2940    }
2941
2942    return setupRawAudioFormat(
2943            kPortIndexInput, sampleRate, numChannels);
2944}
2945
2946status_t ACodec::setupFlacCodec(
2947        bool encoder, int32_t numChannels, int32_t sampleRate, int32_t compressionLevel) {
2948
2949    if (encoder) {
2950        OMX_AUDIO_PARAM_FLACTYPE def;
2951        InitOMXParams(&def);
2952        def.nPortIndex = kPortIndexOutput;
2953
2954        // configure compression level
2955        status_t err = mOMX->getParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
2956        if (err != OK) {
2957            ALOGE("setupFlacCodec(): Error %d getting OMX_IndexParamAudioFlac parameter", err);
2958            return err;
2959        }
2960        def.nCompressionLevel = compressionLevel;
2961        err = mOMX->setParameter(mNode, OMX_IndexParamAudioFlac, &def, sizeof(def));
2962        if (err != OK) {
2963            ALOGE("setupFlacCodec(): Error %d setting OMX_IndexParamAudioFlac parameter", err);
2964            return err;
2965        }
2966    }
2967
2968    return setupRawAudioFormat(
2969            encoder ? kPortIndexInput : kPortIndexOutput,
2970            sampleRate,
2971            numChannels);
2972}
2973
2974status_t ACodec::setupRawAudioFormat(
2975        OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels, AudioEncoding encoding) {
2976    OMX_PARAM_PORTDEFINITIONTYPE def;
2977    InitOMXParams(&def);
2978    def.nPortIndex = portIndex;
2979
2980    status_t err = mOMX->getParameter(
2981            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2982
2983    if (err != OK) {
2984        return err;
2985    }
2986
2987    def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
2988
2989    err = mOMX->setParameter(
2990            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
2991
2992    if (err != OK) {
2993        return err;
2994    }
2995
2996    OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
2997    InitOMXParams(&pcmParams);
2998    pcmParams.nPortIndex = portIndex;
2999
3000    err = mOMX->getParameter(
3001            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
3002
3003    if (err != OK) {
3004        return err;
3005    }
3006
3007    pcmParams.nChannels = numChannels;
3008    switch (encoding) {
3009        case kAudioEncodingPcm8bit:
3010            pcmParams.eNumData = OMX_NumericalDataUnsigned;
3011            pcmParams.nBitPerSample = 8;
3012            break;
3013        case kAudioEncodingPcmFloat:
3014            pcmParams.eNumData = OMX_NumericalDataFloat;
3015            pcmParams.nBitPerSample = 32;
3016            break;
3017        case kAudioEncodingPcm16bit:
3018            pcmParams.eNumData = OMX_NumericalDataSigned;
3019            pcmParams.nBitPerSample = 16;
3020            break;
3021        default:
3022            return BAD_VALUE;
3023    }
3024    pcmParams.bInterleaved = OMX_TRUE;
3025    pcmParams.nSamplingRate = sampleRate;
3026    pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;
3027
3028    if (getOMXChannelMapping(numChannels, pcmParams.eChannelMapping) != OK) {
3029        return OMX_ErrorNone;
3030    }
3031
3032    err = mOMX->setParameter(
3033            mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
3034    // if we could not set up raw format to non-16-bit, try with 16-bit
3035    // NOTE: we will also verify this via readback, in case codec ignores these fields
3036    if (err != OK && encoding != kAudioEncodingPcm16bit) {
3037        pcmParams.eNumData = OMX_NumericalDataSigned;
3038        pcmParams.nBitPerSample = 16;
3039        err = mOMX->setParameter(
3040                mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
3041    }
3042    return err;
3043}
3044
3045status_t ACodec::configureTunneledVideoPlayback(
3046        int32_t audioHwSync, const sp<ANativeWindow> &nativeWindow) {
3047    native_handle_t* sidebandHandle;
3048
3049    status_t err = mOMX->configureVideoTunnelMode(
3050            mNode, kPortIndexOutput, OMX_TRUE, audioHwSync, &sidebandHandle);
3051    if (err != OK) {
3052        ALOGE("configureVideoTunnelMode failed! (err %d).", err);
3053        return err;
3054    }
3055
3056    err = native_window_set_sideband_stream(nativeWindow.get(), sidebandHandle);
3057    if (err != OK) {
3058        ALOGE("native_window_set_sideband_stream(%p) failed! (err %d).",
3059                sidebandHandle, err);
3060        return err;
3061    }
3062
3063    return OK;
3064}
3065
// Enumerates the video formats advertised on |portIndex| and selects the
// entry matching |compressionFormat| + |colorFormat|. Handles two quirks:
// flexible color formats are substituted with the component's concrete
// equivalent, and TI's encoder is matched on only one of the two fields
// (the other is unreliable). Enumeration is capped at kMaxIndicesToCheck.
status_t ACodec::setVideoPortFormatType(
        OMX_U32 portIndex,
        OMX_VIDEO_CODINGTYPE compressionFormat,
        OMX_COLOR_FORMATTYPE colorFormat,
        bool usingNativeBuffers) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format;
    InitOMXParams(&format);
    format.nPortIndex = portIndex;
    format.nIndex = 0;
    bool found = false;

    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));

        if (err != OK) {
            return err;
        }

        // substitute back flexible color format to codec supported format
        OMX_U32 flexibleEquivalent;
        if (compressionFormat == OMX_VIDEO_CodingUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, usingNativeBuffers, &flexibleEquivalent)
                && colorFormat == flexibleEquivalent) {
            ALOGI("[%s] using color format %#x in place of %#x",
                    mComponentName.c_str(), format.eColorFormat, colorFormat);
            colorFormat = format.eColorFormat;
        }

        // The following assertion is violated by TI's video decoder.
        // CHECK_EQ(format.nIndex, index);

        if (!strcmp("OMX.TI.Video.encoder", mComponentName.c_str())) {
            if (portIndex == kPortIndexInput
                    && colorFormat == format.eColorFormat) {
                // eCompressionFormat does not seem right.
                found = true;
                break;
            }
            if (portIndex == kPortIndexOutput
                    && compressionFormat == format.eCompressionFormat) {
                // eColorFormat does not seem right.
                found = true;
                break;
            }
        }

        if (format.eCompressionFormat == compressionFormat
            && format.eColorFormat == colorFormat) {
            found = true;
            break;
        }

        if (index == kMaxIndicesToCheck) {
            // Give up after a bounded number of entries; 'found' stays false
            // and UNKNOWN_ERROR is returned below.
            ALOGW("[%s] stopping checking formats after %u: %s(%x)/%s(%x)",
                    mComponentName.c_str(), index,
                    asString(format.eCompressionFormat), format.eCompressionFormat,
                    asString(format.eColorFormat), format.eColorFormat);
        }
    }

    if (!found) {
        return UNKNOWN_ERROR;
    }

    // Select the matched entry (format still holds the matched description).
    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));

    return err;
}
3140
3141// Set optimal output format. OMX component lists output formats in the order
3142// of preference, but this got more complicated since the introduction of flexible
3143// YUV formats. We support a legacy behavior for applications that do not use
3144// surface output, do not specify an output format, but expect a "usable" standard
3145// OMX format. SW readable and standard formats must be flex-YUV.
3146//
3147// Suggested preference order:
3148// - optimal format for texture rendering (mediaplayer behavior)
3149// - optimal SW readable & texture renderable format (flex-YUV support)
3150// - optimal SW readable non-renderable format (flex-YUV bytebuffer support)
3151// - legacy "usable" standard formats
3152//
3153// For legacy support, we prefer a standard format, but will settle for a SW readable
3154// flex-YUV format.
// See the preference-order discussion in the comment block above. When
// |getLegacyFlexibleFormat| is false, the component's first (most preferred)
// format is selected. When true, enumeration continues until a legacy
// standard YUV420 format is found; failing that, the first format whose
// flexible equivalent is YUV420Flexible is used as a fallback.
status_t ACodec::setSupportedOutputFormat(bool getLegacyFlexibleFormat) {
    OMX_VIDEO_PARAM_PORTFORMATTYPE format, legacyFormat;
    InitOMXParams(&format);
    format.nPortIndex = kPortIndexOutput;

    InitOMXParams(&legacyFormat);
    // this field will change when we find a suitable legacy format
    legacyFormat.eColorFormat = OMX_COLOR_FormatUnused;

    for (OMX_U32 index = 0; ; ++index) {
        format.nIndex = index;
        status_t err = mOMX->getParameter(
                mNode, OMX_IndexParamVideoPortFormat,
                &format, sizeof(format));
        if (err != OK) {
            // no more formats, pick legacy format if found
            if (legacyFormat.eColorFormat != OMX_COLOR_FormatUnused) {
                 memcpy(&format, &legacyFormat, sizeof(format));
                 break;
            }
            return err;
        }
        if (format.eCompressionFormat != OMX_VIDEO_CodingUnused) {
            // Output port must advertise raw (uncompressed) video.
            return OMX_ErrorBadParameter;
        }
        if (!getLegacyFlexibleFormat) {
            // Take the component's most preferred format as-is.
            break;
        }
        // standard formats that were exposed to users before
        if (format.eColorFormat == OMX_COLOR_FormatYUV420Planar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420SemiPlanar
                || format.eColorFormat == OMX_COLOR_FormatYUV420PackedSemiPlanar
                || format.eColorFormat == OMX_TI_COLOR_FormatYUV420PackedSemiPlanar) {
            break;
        }
        // find best legacy non-standard format
        OMX_U32 flexibleEquivalent;
        if (legacyFormat.eColorFormat == OMX_COLOR_FormatUnused
                && isFlexibleColorFormat(
                        mOMX, mNode, format.eColorFormat, false /* usingNativeBuffers */,
                        &flexibleEquivalent)
                && flexibleEquivalent == OMX_COLOR_FormatYUV420Flexible) {
            memcpy(&legacyFormat, &format, sizeof(format));
        }
    }
    return mOMX->setParameter(
            mNode, OMX_IndexParamVideoPortFormat,
            &format, sizeof(format));
}
3205
// Static bidirectional lookup table between video mime types and OMX video
// coding types, used by GetVideoCodingTypeFromMime / GetMimeTypeForVideoCoding.
static const struct VideoCodingMapEntry {
    const char *mMime;
    OMX_VIDEO_CODINGTYPE mVideoCodingType;
} kVideoCodingMapEntry[] = {
    { MEDIA_MIMETYPE_VIDEO_AVC, OMX_VIDEO_CodingAVC },
    { MEDIA_MIMETYPE_VIDEO_HEVC, OMX_VIDEO_CodingHEVC },
    { MEDIA_MIMETYPE_VIDEO_MPEG4, OMX_VIDEO_CodingMPEG4 },
    { MEDIA_MIMETYPE_VIDEO_H263, OMX_VIDEO_CodingH263 },
    { MEDIA_MIMETYPE_VIDEO_MPEG2, OMX_VIDEO_CodingMPEG2 },
    { MEDIA_MIMETYPE_VIDEO_VP8, OMX_VIDEO_CodingVP8 },
    { MEDIA_MIMETYPE_VIDEO_VP9, OMX_VIDEO_CodingVP9 },
    { MEDIA_MIMETYPE_VIDEO_DOLBY_VISION, OMX_VIDEO_CodingDolbyVision },
};
3219
3220static status_t GetVideoCodingTypeFromMime(
3221        const char *mime, OMX_VIDEO_CODINGTYPE *codingType) {
3222    for (size_t i = 0;
3223         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
3224         ++i) {
3225        if (!strcasecmp(mime, kVideoCodingMapEntry[i].mMime)) {
3226            *codingType = kVideoCodingMapEntry[i].mVideoCodingType;
3227            return OK;
3228        }
3229    }
3230
3231    *codingType = OMX_VIDEO_CodingUnused;
3232
3233    return ERROR_UNSUPPORTED;
3234}
3235
3236static status_t GetMimeTypeForVideoCoding(
3237        OMX_VIDEO_CODINGTYPE codingType, AString *mime) {
3238    for (size_t i = 0;
3239         i < sizeof(kVideoCodingMapEntry) / sizeof(kVideoCodingMapEntry[0]);
3240         ++i) {
3241        if (codingType == kVideoCodingMapEntry[i].mVideoCodingType) {
3242            *mime = kVideoCodingMapEntry[i].mMime;
3243            return OK;
3244        }
3245    }
3246
3247    mime->clear();
3248
3249    return ERROR_UNSUPPORTED;
3250}
3251
// Best-effort request to use |bufferNum| buffers on |portIndex|. The component
// may reject the new count; that is logged as a warning but NOT treated as an
// error, since the component then simply keeps its previous buffer count.
// Only a failure to read the current port definition is fatal.
status_t ACodec::setPortBufferNum(OMX_U32 portIndex, int bufferNum) {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);
    def.nPortIndex = portIndex;
    status_t err;
    ALOGD("Setting [%s] %s port buffer number: %d", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    // Read-modify-write: fetch the current definition so only
    // nBufferCountActual changes.
    err = mOMX->getParameter(
        mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        return err;
    }
    def.nBufferCountActual = bufferNum;
    err = mOMX->setParameter(
        mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
    if (err != OK) {
        // Component could reject this request.
        ALOGW("Fail to set [%s] %s port buffer number: %d", mComponentName.c_str(),
            portIndex == kPortIndexInput ? "input" : "output", bufferNum);
    }
    // Deliberately OK even when setParameter failed: the request is advisory.
    return OK;
}
3274
// Configures this component as a video decoder for |mime|:
//  - selects the input compression format derived from the MIME type,
//  - selects the output color format (requested or a supported default),
//  - optionally applies caller-requested port buffer counts,
//  - sets the video definitions (size, frame rate) on both ports,
//  - applies color aspects and HDR static info, both of which are optional
//    codec features (ERROR_UNSUPPORTED from them is ignored).
// |outputFormat| is updated with the resulting color-aspect/HDR metadata.
status_t ACodec::setupVideoDecoder(
        const char *mime, const sp<AMessage> &msg, bool haveNativeWindow,
        bool usingSwRenderer, sp<AMessage> &outputFormat) {
    int32_t width, height;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CODINGTYPE compressionFormat;
    status_t err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    if (compressionFormat == OMX_VIDEO_CodingVP9) {
        OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
        InitOMXParams(&params);
        params.nPortIndex = kPortIndexInput;
        // Check if VP9 decoder advertises supported profiles.
        params.nProfileIndex = 0;
        // NOTE: this |err| intentionally shadows the outer one; it is only used
        // to detect legacy VP9 decoders that cannot enumerate profiles.
        status_t err = mOMX->getParameter(
                mNode,
                OMX_IndexParamVideoProfileLevelQuerySupported,
                &params,
                sizeof(params));
        mIsLegacyVP9Decoder = err != OK;
    }

    err = setVideoPortFormatType(
            kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        return err;
    }

    int32_t tmp;
    if (msg->findInt32("color-format", &tmp)) {
        OMX_COLOR_FORMATTYPE colorFormat =
            static_cast<OMX_COLOR_FORMATTYPE>(tmp);
        err = setVideoPortFormatType(
                kPortIndexOutput, OMX_VIDEO_CodingUnused, colorFormat, haveNativeWindow);
        if (err != OK) {
            // Requested color format rejected; fall back to a format the
            // component supports.
            ALOGW("[%s] does not support color format %d",
                  mComponentName.c_str(), colorFormat);
            err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
        }
    } else {
        err = setSupportedOutputFormat(!haveNativeWindow /* getLegacyFlexibleFormat */);
    }

    if (err != OK) {
        return err;
    }

    // Set the component input buffer number to be |tmp|. If succeed,
    // component will set input port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-input-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexInput, tmp);
        if (err != OK)
            return err;
    }

    // Set the component output buffer number to be |tmp|. If succeed,
    // component will set output port buffer number to be |tmp|. If fail,
    // component will keep the same buffer number as before.
    if (msg->findInt32("android._num-output-buffers", &tmp)) {
        err = setPortBufferNum(kPortIndexOutput, tmp);
        if (err != OK)
            return err;
    }

    // Frame rate may arrive as a float or an int; default to -1 (unknown).
    int32_t frameRateInt;
    float frameRateFloat;
    if (!msg->findFloat("frame-rate", &frameRateFloat)) {
        if (!msg->findInt32("frame-rate", &frameRateInt)) {
            frameRateInt = -1;
        }
        frameRateFloat = (float)frameRateInt;
    }

    err = setVideoFormatOnPort(
            kPortIndexInput, width, height, compressionFormat, frameRateFloat);

    if (err != OK) {
        return err;
    }

    err = setVideoFormatOnPort(
            kPortIndexOutput, width, height, OMX_VIDEO_CodingUnused);

    if (err != OK) {
        return err;
    }

    err = setColorAspectsForVideoDecoder(
            width, height, haveNativeWindow | usingSwRenderer, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexOutput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        err = OK;
    }
    return err;
}
3388
3389status_t ACodec::initDescribeColorAspectsIndex() {
3390    status_t err = mOMX->getExtensionIndex(
3391            mNode, "OMX.google.android.index.describeColorAspects", &mDescribeColorAspectsIndex);
3392    if (err != OK) {
3393        mDescribeColorAspectsIndex = (OMX_INDEXTYPE)0;
3394    }
3395    return err;
3396}
3397
// Pushes the color aspects in |params| to the codec through the cached
// describeColorAspects extension index. When |verify| is set and the set
// succeeded, reads the aspects back into |params| via getCodecColorAspects().
// Returns ERROR_UNSUPPORTED when the codec does not expose the extension.
status_t ACodec::setCodecColorAspects(DescribeColorAspectsParams &params, bool verify) {
    status_t err = ERROR_UNSUPPORTED;
    // A zero index means initDescribeColorAspectsIndex() failed earlier.
    if (mDescribeColorAspectsIndex) {
        err = mOMX->setConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] setting color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));

    if (verify && err == OK) {
        err = getCodecColorAspects(params);
    }

    // Advertising the extension but rejecting the config is a codec bug worth
    // surfacing.
    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex,
            "[%s] setting color aspects failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}
3420
// Communicates the decoder's output color aspects (taken from |configFormat|)
// to the codec. When rendering to a native window, unspecified aspects are
// first defaulted based on the video size, and those defaults are mirrored
// into |outputFormat| in case the codec cannot report them back.
status_t ACodec::setColorAspectsForVideoDecoder(
        int32_t width, int32_t height, bool usingNativeWindow,
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (usingNativeWindow) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
        // The default aspects will be set back to the output format during the
        // getFormat phase of configure(). Set non-Unspecified values back into the
        // format, in case component does not support this enumeration.
        setColorAspectsIntoFormat(params.sAspects, outputFormat);
    }

    // Failure only clears the cached index; lack of support is tolerated.
    (void)initDescribeColorAspectsIndex();

    // communicate color aspects to codec
    return setCodecColorAspects(params);
}
3442
// Reads the codec's current color aspects (and, if requested via
// bRequestingDataSpace, its suggested dataspace) into |params| through the
// cached describeColorAspects extension index. Returns ERROR_UNSUPPORTED when
// the codec does not expose the extension.
status_t ACodec::getCodecColorAspects(DescribeColorAspectsParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    // A zero index means initDescribeColorAspectsIndex() failed earlier.
    if (mDescribeColorAspectsIndex) {
        err = mOMX->getConfig(mNode, mDescribeColorAspectsIndex, &params, sizeof(params));
    }
    ALOGV("[%s] got color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) err=%d(%s)",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            err, asString(err));
    if (params.bRequestingDataSpace) {
        ALOGV("for dataspace %#x", params.nDataSpace);
    }
    // Dataspace requests and dataspace-changed queries are optional even for
    // codecs that support the extension, so only warn on plain aspect queries.
    if (err == ERROR_UNSUPPORTED && mDescribeColorAspectsIndex
            && !params.bRequestingDataSpace && !params.bDataSpaceChanged) {
        ALOGW("[%s] getting color aspects failed even though codec advertises support",
                mComponentName.c_str());
    }
    return err;
}
3465
3466status_t ACodec::getInputColorAspectsForVideoEncoder(sp<AMessage> &format) {
3467    DescribeColorAspectsParams params;
3468    InitOMXParams(&params);
3469    params.nPortIndex = kPortIndexInput;
3470    status_t err = getCodecColorAspects(params);
3471    if (err == OK) {
3472        // we only set encoder input aspects if codec supports them
3473        setColorAspectsIntoFormat(params.sAspects, format, true /* force */);
3474    }
3475    return err;
3476}
3477
// Determines the dataspace to use for the aspects in |params|. When |tryCodec|
// is set, first asks the codec for guidance (temporarily flagging
// bRequestingDataSpace); otherwise — or if the codec has no answer — derives
// the dataspace from the aspects themselves. *dataSpace is always written.
status_t ACodec::getDataSpace(
        DescribeColorAspectsParams &params, android_dataspace *dataSpace /* nonnull */,
        bool tryCodec) {
    status_t err = OK;
    if (tryCodec) {
        // request dataspace guidance from codec.
        params.bRequestingDataSpace = OMX_TRUE;
        err = getCodecColorAspects(params);
        params.bRequestingDataSpace = OMX_FALSE;
        if (err == OK && params.nDataSpace != HAL_DATASPACE_UNKNOWN) {
            // Codec supplied a concrete dataspace; use it as-is.
            *dataSpace = (android_dataspace)params.nDataSpace;
            return err;
        } else if (err == ERROR_UNSUPPORTED) {
            // ignore not-implemented error for dataspace requests
            err = OK;
        }
    }

    // this returns legacy versions if available
    *dataSpace = getDataSpaceForColorAspects(params.sAspects, true /* mayexpand */);
    ALOGV("[%s] using color aspects (R:%d(%s), P:%d(%s), M:%d(%s), T:%d(%s)) "
          "and dataspace %#x",
            mComponentName.c_str(),
            params.sAspects.mRange, asString(params.sAspects.mRange),
            params.sAspects.mPrimaries, asString(params.sAspects.mPrimaries),
            params.sAspects.mMatrixCoeffs, asString(params.sAspects.mMatrixCoeffs),
            params.sAspects.mTransfer, asString(params.sAspects.mTransfer),
            *dataSpace);
    return err;
}
3508
3509
// Re-applies the decoder's color aspects from |configFormat| (with size-based
// defaults when a dataspace is requested), reads back the codec's view, writes
// the resulting aspects into |outputFormat|, and optionally computes the
// dataspace to render with. |dataSpace| may be NULL when no dataspace is needed.
status_t ACodec::getColorAspectsAndDataSpaceForVideoDecoder(
        int32_t width, int32_t height, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat,
        android_dataspace *dataSpace) {
    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    // reset default format and get resulting format
    getColorAspectsFromFormat(configFormat, params.sAspects);
    if (dataSpace != NULL) {
        setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
    }
    status_t err = setCodecColorAspects(params, true /* readBack */);

    // we always set specified aspects for decoders
    setColorAspectsIntoFormat(params.sAspects, outputFormat);

    if (dataSpace != NULL) {
        // Only consult the codec for the dataspace if the aspect exchange
        // succeeded; otherwise derive it locally from the aspects.
        status_t res = getDataSpace(params, dataSpace, err == OK /* tryCodec */);
        if (err == OK) {
            err = res;
        }
    }

    return err;
}
3536
// initial video encoder setup for bytebuffer mode
//
// Copies the configured color information into |outputFormat|, then negotiates
// the input-port color aspects with the codec. For recorder use, also derives
// the input dataspace (defaulting to BT.709) and publishes it in |inputFormat|.
// The negotiation loop retries once if the codec alters the requested aspects.
status_t ACodec::setColorAspectsForVideoEncoder(
        const sp<AMessage> &configFormat, sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    // copy config to output format as this is not exposed via getFormat
    copyColorConfig(configFormat, outputFormat);

    DescribeColorAspectsParams params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexInput;
    getColorAspectsFromFormat(configFormat, params.sAspects);

    // Failure only clears the cached index; lack of support is tolerated.
    (void)initDescribeColorAspectsIndex();

    int32_t usingRecorder;
    if (configFormat->findInt32("android._using-recorder", &usingRecorder) && usingRecorder) {
        android_dataspace dataSpace = HAL_DATASPACE_BT709;
        int32_t width, height;
        if (configFormat->findInt32("width", &width)
                && configFormat->findInt32("height", &height)) {
            setDefaultCodecColorAspectsIfNeeded(params.sAspects, width, height);
            status_t err = getDataSpace(
                    params, &dataSpace, mDescribeColorAspectsIndex /* tryCodec */);
            if (err != OK) {
                return err;
            }
            setColorAspectsIntoFormat(params.sAspects, outputFormat);
        }
        inputFormat->setInt32("android._dataspace", (int32_t)dataSpace);
    }

    // communicate color aspects to codec, but do not allow change of the platform aspects
    ColorAspects origAspects = params.sAspects;
    for (int triesLeft = 2; --triesLeft >= 0; ) {
        status_t err = setCodecColorAspects(params, true /* readBack */);
        // Stop on error, or once the read-back aspects match what we asked for
        // (checkIfAspectsChangedAndUnspecifyThem also unspecifies changed
        // fields so the retry only re-negotiates those).
        if (err != OK
                || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(
                        params.sAspects, origAspects, true /* usePlatformAspects */)) {
            return err;
        }
        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
                mComponentName.c_str());
    }
    return OK;
}
3581
3582status_t ACodec::setHDRStaticInfoForVideoCodec(
3583        OMX_U32 portIndex, const sp<AMessage> &configFormat, sp<AMessage> &outputFormat) {
3584    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
3585
3586    DescribeHDRStaticInfoParams params;
3587    InitOMXParams(&params);
3588    params.nPortIndex = portIndex;
3589
3590    HDRStaticInfo *info = &params.sInfo;
3591    if (getHDRStaticInfoFromFormat(configFormat, info)) {
3592        setHDRStaticInfoIntoFormat(params.sInfo, outputFormat);
3593    }
3594
3595    (void)initDescribeHDRStaticInfoIndex();
3596
3597    // communicate HDR static Info to codec
3598    return setHDRStaticInfo(params);
3599}
3600
3601// subsequent initial video encoder setup for surface mode
3602status_t ACodec::setInitialColorAspectsForVideoEncoderSurfaceAndGetDataSpace(
3603        android_dataspace *dataSpace /* nonnull */) {
3604    DescribeColorAspectsParams params;
3605    InitOMXParams(&params);
3606    params.nPortIndex = kPortIndexInput;
3607    ColorAspects &aspects = params.sAspects;
3608
3609    // reset default format and store resulting format into both input and output formats
3610    getColorAspectsFromFormat(mConfigFormat, aspects);
3611    int32_t width, height;
3612    if (mInputFormat->findInt32("width", &width) && mInputFormat->findInt32("height", &height)) {
3613        setDefaultCodecColorAspectsIfNeeded(aspects, width, height);
3614    }
3615    setColorAspectsIntoFormat(aspects, mInputFormat);
3616    setColorAspectsIntoFormat(aspects, mOutputFormat);
3617
3618    // communicate color aspects to codec, but do not allow any change
3619    ColorAspects origAspects = aspects;
3620    status_t err = OK;
3621    for (int triesLeft = 2; mDescribeColorAspectsIndex && --triesLeft >= 0; ) {
3622        status_t err = setCodecColorAspects(params, true /* readBack */);
3623        if (err != OK || !ColorUtils::checkIfAspectsChangedAndUnspecifyThem(aspects, origAspects)) {
3624            break;
3625        }
3626        ALOGW_IF(triesLeft == 0, "[%s] Codec repeatedly changed requested ColorAspects.",
3627                mComponentName.c_str());
3628    }
3629
3630    *dataSpace = HAL_DATASPACE_BT709;
3631    aspects = origAspects; // restore desired color aspects
3632    status_t res = getDataSpace(
3633            params, dataSpace, err == OK && mDescribeColorAspectsIndex /* tryCodec */);
3634    if (err == OK) {
3635        err = res;
3636    }
3637    mInputFormat->setInt32("android._dataspace", (int32_t)*dataSpace);
3638    mInputFormat->setBuffer(
3639            "android._color-aspects", ABuffer::CreateAsCopy(&aspects, sizeof(aspects)));
3640
3641    // update input format with codec supported color aspects (basically set unsupported
3642    // aspects to Unspecified)
3643    if (err == OK) {
3644        (void)getInputColorAspectsForVideoEncoder(mInputFormat);
3645    }
3646
3647    ALOGV("set default color aspects, updated input format to %s, output format to %s",
3648            mInputFormat->debugString(4).c_str(), mOutputFormat->debugString(4).c_str());
3649
3650    return err;
3651}
3652
// Queries the HDR static info currently in effect on the given port and, on
// success, writes it into |format|. Returns ERROR_UNSUPPORTED if the codec
// does not expose the describeHDRStaticInfo extension.
status_t ACodec::getHDRStaticInfoForVideoCodec(OMX_U32 portIndex, sp<AMessage> &format) {
    CHECK(portIndex == kPortIndexInput || portIndex == kPortIndexOutput);
    DescribeHDRStaticInfoParams params;
    InitOMXParams(&params);
    params.nPortIndex = portIndex;

    status_t err = getHDRStaticInfo(params);
    if (err == OK) {
        // we only set decoder output HDRStaticInfo if codec supports them
        setHDRStaticInfoIntoFormat(params.sInfo, format);
    }
    return err;
}
3666
3667status_t ACodec::initDescribeHDRStaticInfoIndex() {
3668    status_t err = mOMX->getExtensionIndex(
3669            mNode, "OMX.google.android.index.describeHDRStaticInfo", &mDescribeHDRStaticInfoIndex);
3670    if (err != OK) {
3671        mDescribeHDRStaticInfoIndex = (OMX_INDEXTYPE)0;
3672    }
3673    return err;
3674}
3675
// Pushes the HDR static info in |params| to the codec through the cached
// describeHDRStaticInfo extension index. Returns ERROR_UNSUPPORTED when the
// codec does not expose the extension.
status_t ACodec::setHDRStaticInfo(const DescribeHDRStaticInfoParams &params) {
    status_t err = ERROR_UNSUPPORTED;
    // A zero index means initDescribeHDRStaticInfoIndex() failed earlier.
    if (mDescribeHDRStaticInfoIndex) {
        err = mOMX->setConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
    }

    const HDRStaticInfo *info = &params.sInfo;
    ALOGV("[%s] setting  HDRStaticInfo (R: %u %u, G: %u %u, B: %u, %u, W: %u, %u, "
            "MaxDispL: %u, MinDispL: %u, MaxContentL: %u, MaxFrameAvgL: %u)",
            mComponentName.c_str(),
            info->sType1.mR.x, info->sType1.mR.y, info->sType1.mG.x, info->sType1.mG.y,
            info->sType1.mB.x, info->sType1.mB.y, info->sType1.mW.x, info->sType1.mW.y,
            info->sType1.mMaxDisplayLuminance, info->sType1.mMinDisplayLuminance,
            info->sType1.mMaxContentLightLevel, info->sType1.mMaxFrameAverageLightLevel);

    // Advertising the extension but rejecting the config is a codec bug worth
    // surfacing.
    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
            "[%s] setting HDRStaticInfo failed even though codec advertises support",
            mComponentName.c_str());
    return err;
}
3696
3697status_t ACodec::getHDRStaticInfo(DescribeHDRStaticInfoParams &params) {
3698    status_t err = ERROR_UNSUPPORTED;
3699    if (mDescribeHDRStaticInfoIndex) {
3700        err = mOMX->getConfig(mNode, mDescribeHDRStaticInfoIndex, &params, sizeof(params));
3701    }
3702
3703    ALOGW_IF(err == ERROR_UNSUPPORTED && mDescribeHDRStaticInfoIndex,
3704            "[%s] getting HDRStaticInfo failed even though codec advertises support",
3705            mComponentName.c_str());
3706    return err;
3707}
3708
// Configures this component as a video encoder for |mime|:
//  - sets the raw color format and video definition on the input port,
//  - sets the compression format, size and bitrate on the output port,
//  - applies optional intra-refresh, codec-specific encoder parameters,
//    color aspects, HDR static info, and temporal layering.
// Color aspects and HDR static info are optional codec features; their
// ERROR_UNSUPPORTED results are downgraded to OK.
status_t ACodec::setupVideoEncoder(
        const char *mime, const sp<AMessage> &msg,
        sp<AMessage> &outputFormat, sp<AMessage> &inputFormat) {
    int32_t tmp;
    if (!msg->findInt32("color-format", &tmp)) {
        return INVALID_OPERATION;
    }

    OMX_COLOR_FORMATTYPE colorFormat =
        static_cast<OMX_COLOR_FORMATTYPE>(tmp);

    status_t err = setVideoPortFormatType(
            kPortIndexInput, OMX_VIDEO_CodingUnused, colorFormat);

    if (err != OK) {
        ALOGE("[%s] does not support color format %d",
              mComponentName.c_str(), colorFormat);

        return err;
    }

    /* Input port configuration */

    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;

    def.nPortIndex = kPortIndexInput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    int32_t width, height, bitrate;
    if (!msg->findInt32("width", &width)
            || !msg->findInt32("height", &height)
            || !msg->findInt32("bitrate", &bitrate)) {
        return INVALID_OPERATION;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;

    int32_t stride;
    if (!msg->findInt32("stride", &stride)) {
        stride = width;
    }

    video_def->nStride = stride;

    int32_t sliceHeight;
    if (!msg->findInt32("slice-height", &sliceHeight)) {
        sliceHeight = height;
    }

    video_def->nSliceHeight = sliceHeight;

    // Sized for 4:2:0 raw input (1.5 bytes per pixel).
    def.nBufferSize = (video_def->nStride * video_def->nSliceHeight * 3) / 2;

    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
        mTimePerFrameUs = (int64_t) (1000000.0f / frameRate);
    }

    // xFramerate is in Q16 fixed point.
    video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
    video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
    // this is redundant as it was already set up in setVideoPortFormatType
    // FIXME for now skip this only for flexible YUV formats
    if (colorFormat != OMX_COLOR_FormatYUV420Flexible) {
        video_def->eColorFormat = colorFormat;
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set input port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    /* Output port configuration */

    OMX_VIDEO_CODINGTYPE compressionFormat;
    err = GetVideoCodingTypeFromMime(mime, &compressionFormat);

    if (err != OK) {
        return err;
    }

    err = setVideoPortFormatType(
            kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused);

    if (err != OK) {
        ALOGE("[%s] does not support compression format %d",
             mComponentName.c_str(), compressionFormat);

        return err;
    }

    // |def| is reused to configure the output port.
    def.nPortIndex = kPortIndexOutput;

    err = mOMX->getParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        return err;
    }

    video_def->nFrameWidth = width;
    video_def->nFrameHeight = height;
    video_def->xFramerate = 0;
    video_def->nBitrate = bitrate;
    video_def->eCompressionFormat = compressionFormat;
    video_def->eColorFormat = OMX_COLOR_FormatUnused;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));

    if (err != OK) {
        ALOGE("[%s] failed to set output port definition parameters.",
              mComponentName.c_str());

        return err;
    }

    // Intra refresh is an optional feature; failure to apply it is tolerated.
    int32_t intraRefreshPeriod = 0;
    if (msg->findInt32("intra-refresh-period", &intraRefreshPeriod)
            && intraRefreshPeriod >= 0) {
        err = setIntraRefreshPeriod((uint32_t)intraRefreshPeriod, true);
        if (err != OK) {
            ALOGI("[%s] failed setIntraRefreshPeriod. Failure is fine since this key is optional",
                    mComponentName.c_str());
            err = OK;
        }
    }

    // Apply codec-specific encoder parameters.
    switch (compressionFormat) {
        case OMX_VIDEO_CodingMPEG4:
            err = setupMPEG4EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingH263:
            err = setupH263EncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingAVC:
            err = setupAVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingHEVC:
            err = setupHEVCEncoderParameters(msg);
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            err = setupVPXEncoderParameters(msg, outputFormat);
            break;

        default:
            break;
    }

    if (err != OK) {
        return err;
    }

    // Set up color aspects on input, but propagate them to the output format, as they will
    // not be read back from encoder.
    err = setColorAspectsForVideoEncoder(msg, outputFormat, inputFormat);
    if (err == ERROR_UNSUPPORTED) {
        ALOGI("[%s] cannot encode color aspects. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    err = setHDRStaticInfoForVideoCodec(kPortIndexInput, msg, outputFormat);
    if (err == ERROR_UNSUPPORTED) { // support is optional
        ALOGI("[%s] cannot encode HDR static metadata. Ignoring.", mComponentName.c_str());
        err = OK;
    }

    if (err != OK) {
        return err;
    }

    switch (compressionFormat) {
        case OMX_VIDEO_CodingAVC:
        case OMX_VIDEO_CodingHEVC:
            err = configureTemporalLayers(msg, true /* inConfigure */, outputFormat);
            if (err != OK) {
                err = OK; // ignore failure
            }
            break;

        case OMX_VIDEO_CodingVP8:
        case OMX_VIDEO_CodingVP9:
            // TODO: do we need to support android.generic layering? webrtc layering is
            // already set up in setupVPXEncoderParameters.
            break;

        default:
            break;
    }

    if (err == OK) {
        ALOGI("setupVideoEncoder succeeded");
    }

    return err;
}
3933
// Configures intra-macroblock refresh on the output port for the given OMX
// refresh |mode|. Cyclic mode requires "intra-refresh-CIR-mbs", adaptive mode
// requires "intra-refresh-AIR-mbs" and "intra-refresh-AIR-ref"; "both" mode
// requires all three. Missing required keys yield INVALID_OPERATION.
status_t ACodec::setCyclicIntraMacroblockRefresh(const sp<AMessage> &msg, int32_t mode) {
    OMX_VIDEO_PARAM_INTRAREFRESHTYPE params;
    InitOMXParams(&params);
    params.nPortIndex = kPortIndexOutput;

    params.eRefreshMode = static_cast<OMX_VIDEO_INTRAREFRESHTYPE>(mode);

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshCyclic ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-CIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nCirMBs = mbs;
    }

    if (params.eRefreshMode == OMX_VIDEO_IntraRefreshAdaptive ||
            params.eRefreshMode == OMX_VIDEO_IntraRefreshBoth) {
        int32_t mbs;
        if (!msg->findInt32("intra-refresh-AIR-mbs", &mbs)) {
            return INVALID_OPERATION;
        }
        params.nAirMBs = mbs;

        int32_t ref;
        if (!msg->findInt32("intra-refresh-AIR-ref", &ref)) {
            return INVALID_OPERATION;
        }
        params.nAirRef = ref;
    }

    status_t err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoIntraRefresh,
            &params, sizeof(params));
    return err;
}
3970
3971static OMX_U32 setPFramesSpacing(
3972        float iFramesInterval /* seconds */, int32_t frameRate, uint32_t BFramesSpacing = 0) {
3973    // BFramesSpacing is the number of B frames between I/P frames
3974    // PFramesSpacing (the value to be returned) is the number of P frames between I frames
3975    //
3976    // keyFrameInterval = ((PFramesSpacing + 1) * BFramesSpacing) + PFramesSpacing + 1
3977    //                                     ^^^                            ^^^        ^^^
3978    //                              number of B frames                number of P    I frame
3979    //
3980    //                  = (PFramesSpacing + 1) * (BFramesSpacing + 1)
3981    //
3982    // E.g.
3983    //      I   P   I  : I-interval: 8, nPFrames 1, nBFrames 3
3984    //       BBB BBB
3985
3986    if (iFramesInterval < 0) { // just 1 key frame
3987        return 0xFFFFFFFE; // don't use maxint as key-frame-interval calculation will add 1
3988    } else if (iFramesInterval == 0) { // just key frames
3989        return 0;
3990    }
3991
3992    // round down as key-frame-interval is an upper limit
3993    uint32_t keyFrameInterval = uint32_t(frameRate * iFramesInterval);
3994    OMX_U32 ret = keyFrameInterval / (BFramesSpacing + 1);
3995    return ret > 0 ? ret - 1 : 0;
3996}
3997
3998static OMX_VIDEO_CONTROLRATETYPE getBitrateMode(const sp<AMessage> &msg) {
3999    int32_t tmp;
4000    if (!msg->findInt32("bitrate-mode", &tmp)) {
4001        return OMX_Video_ControlRateVariable;
4002    }
4003
4004    return static_cast<OMX_VIDEO_CONTROLRATETYPE>(tmp);
4005}
4006
// Applies MPEG-4 specific encoder settings from |msg|: GOP structure derived
// from "i-frame-interval" and "frame-rate", optional profile/level, the
// bitrate/rate-control mode, and error-correction parameters. "bitrate" and
// "i-frame-interval" are required; "profile" requires "level".
status_t ACodec::setupMPEG4EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // Frame rate may arrive as a float or an int.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
    InitOMXParams(&mpeg4type);
    mpeg4type.nPortIndex = kPortIndexOutput;

    // Read-modify-write the codec's current MPEG-4 settings.
    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    mpeg4type.nSliceHeaderSpacing = 0;
    mpeg4type.bSVH = OMX_FALSE;
    mpeg4type.bGov = OMX_FALSE;

    mpeg4type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    mpeg4type.nBFrames = 0;
    mpeg4type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, mpeg4type.nBFrames);
    if (mpeg4type.nPFrames == 0) {
        // All-I-frame stream: disallow P pictures entirely.
        mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }
    mpeg4type.nIDCVLCThreshold = 0;
    mpeg4type.bACPred = OMX_TRUE;
    mpeg4type.nMaxPacketSize = 256;
    mpeg4type.nTimeIncRes = 1000;
    mpeg4type.nHeaderExtension = 0;
    mpeg4type.bReversibleVLC = OMX_FALSE;

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profile);
        mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(level);
    }

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}
4088
// Configures the H.263 encoder: derives the I/P-frame spacing from the
// requested i-frame interval and frame rate, applies an optional
// profile/level pair, then programs the bitrate and error-correction
// parameters. Returns OK on success, INVALID_OPERATION when required
// format keys are missing, or the first OMX error encountered.
status_t ACodec::setupH263EncoderParameters(const sp<AMessage> &msg) {
    int32_t bitrate;
    float iFrameInterval;
    // "bitrate" and "i-frame-interval" are mandatory for encoder setup.
    if (!msg->findInt32("bitrate", &bitrate)
            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
        return INVALID_OPERATION;
    }

    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);

    // "frame-rate" may be stored as either a float or an int32; accept both.
    float frameRate;
    if (!msg->findFloat("frame-rate", &frameRate)) {
        int32_t tmp;
        if (!msg->findInt32("frame-rate", &tmp)) {
            return INVALID_OPERATION;
        }
        frameRate = (float)tmp;
    }

    // Read-modify-write the component's H.263 parameters on the output port.
    OMX_VIDEO_PARAM_H263TYPE h263type;
    InitOMXParams(&h263type);
    h263type.nPortIndex = kPortIndexOutput;

    status_t err = mOMX->getParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    h263type.nAllowedPictureTypes =
        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;

    h263type.nBFrames = 0;
    h263type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h263type.nBFrames);
    // A zero P-frame count means every frame is a sync frame: I-frames only.
    if (h263type.nPFrames == 0) {
        h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
    }

    int32_t profile;
    if (msg->findInt32("profile", &profile)) {
        // A profile without an accompanying level is rejected.
        int32_t level;
        if (!msg->findInt32("level", &level)) {
            return INVALID_OPERATION;
        }

        err = verifySupportForProfileAndLevel(profile, level);

        if (err != OK) {
            return err;
        }

        h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profile);
        h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(level);
    }

    // Disable optional H.263 annex features for broadest compatibility.
    h263type.bPLUSPTYPEAllowed = OMX_FALSE;
    h263type.bForceRoundingTypeToZero = OMX_FALSE;
    h263type.nPictureHeaderRepetition = 0;
    h263type.nGOBHeaderInterval = 0;

    err = mOMX->setParameter(
            mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));

    if (err != OK) {
        return err;
    }

    err = configureBitrate(bitrate, bitrateMode);

    if (err != OK) {
        return err;
    }

    return setupErrorCorrectionParameters();
}
4165
4166// static
4167int /* OMX_VIDEO_AVCLEVELTYPE */ ACodec::getAVCLevelFor(
4168        int width, int height, int rate, int bitrate,
4169        OMX_VIDEO_AVCPROFILETYPE profile) {
4170    // convert bitrate to main/baseline profile kbps equivalent
4171    switch (profile) {
4172        case OMX_VIDEO_AVCProfileHigh10:
4173            bitrate = divUp(bitrate, 3000); break;
4174        case OMX_VIDEO_AVCProfileHigh:
4175            bitrate = divUp(bitrate, 1250); break;
4176        default:
4177            bitrate = divUp(bitrate, 1000); break;
4178    }
4179
4180    // convert size and rate to MBs
4181    width = divUp(width, 16);
4182    height = divUp(height, 16);
4183    int mbs = width * height;
4184    rate *= mbs;
4185    int maxDimension = max(width, height);
4186
4187    static const int limits[][5] = {
4188        /*   MBps     MB   dim  bitrate        level */
4189        {    1485,    99,  28,     64, OMX_VIDEO_AVCLevel1  },
4190        {    1485,    99,  28,    128, OMX_VIDEO_AVCLevel1b },
4191        {    3000,   396,  56,    192, OMX_VIDEO_AVCLevel11 },
4192        {    6000,   396,  56,    384, OMX_VIDEO_AVCLevel12 },
4193        {   11880,   396,  56,    768, OMX_VIDEO_AVCLevel13 },
4194        {   11880,   396,  56,   2000, OMX_VIDEO_AVCLevel2  },
4195        {   19800,   792,  79,   4000, OMX_VIDEO_AVCLevel21 },
4196        {   20250,  1620, 113,   4000, OMX_VIDEO_AVCLevel22 },
4197        {   40500,  1620, 113,  10000, OMX_VIDEO_AVCLevel3  },
4198        {  108000,  3600, 169,  14000, OMX_VIDEO_AVCLevel31 },
4199        {  216000,  5120, 202,  20000, OMX_VIDEO_AVCLevel32 },
4200        {  245760,  8192, 256,  20000, OMX_VIDEO_AVCLevel4  },
4201        {  245760,  8192, 256,  50000, OMX_VIDEO_AVCLevel41 },
4202        {  522240,  8704, 263,  50000, OMX_VIDEO_AVCLevel42 },
4203        {  589824, 22080, 420, 135000, OMX_VIDEO_AVCLevel5  },
4204        {  983040, 36864, 543, 240000, OMX_VIDEO_AVCLevel51 },
4205        { 2073600, 36864, 543, 240000, OMX_VIDEO_AVCLevel52 },
4206    };
4207
4208    for (size_t i = 0; i < ARRAY_SIZE(limits); i++) {
4209        const int (&limit)[5] = limits[i];
4210        if (rate <= limit[0] && mbs <= limit[1] && maxDimension <= limit[2]
4211                && bitrate <= limit[3]) {
4212            return limit[4];
4213        }
4214    }
4215    return 0;
4216}
4217
4218status_t ACodec::setupAVCEncoderParameters(const sp<AMessage> &msg) {
4219    int32_t bitrate;
4220    float iFrameInterval;
4221    if (!msg->findInt32("bitrate", &bitrate)
4222            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
4223        return INVALID_OPERATION;
4224    }
4225
4226    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
4227
4228    float frameRate;
4229    if (!msg->findFloat("frame-rate", &frameRate)) {
4230        int32_t tmp;
4231        if (!msg->findInt32("frame-rate", &tmp)) {
4232            return INVALID_OPERATION;
4233        }
4234        frameRate = (float)tmp;
4235    }
4236
4237    status_t err = OK;
4238    int32_t intraRefreshMode = 0;
4239    if (msg->findInt32("intra-refresh-mode", &intraRefreshMode)) {
4240        err = setCyclicIntraMacroblockRefresh(msg, intraRefreshMode);
4241        if (err != OK) {
4242            ALOGE("Setting intra macroblock refresh mode (%d) failed: 0x%x",
4243                    err, intraRefreshMode);
4244            return err;
4245        }
4246    }
4247
4248    OMX_VIDEO_PARAM_AVCTYPE h264type;
4249    InitOMXParams(&h264type);
4250    h264type.nPortIndex = kPortIndexOutput;
4251
4252    err = mOMX->getParameter(
4253            mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
4254
4255    if (err != OK) {
4256        return err;
4257    }
4258
4259    h264type.nAllowedPictureTypes =
4260        OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
4261
4262    int32_t profile;
4263    if (msg->findInt32("profile", &profile)) {
4264        int32_t level;
4265        if (!msg->findInt32("level", &level)) {
4266            return INVALID_OPERATION;
4267        }
4268
4269        err = verifySupportForProfileAndLevel(profile, level);
4270
4271        if (err != OK) {
4272            return err;
4273        }
4274
4275        h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profile);
4276        h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(level);
4277    } else {
4278        h264type.eProfile = OMX_VIDEO_AVCProfileBaseline;
4279#if 0   /* DON'T YET DEFAULT TO HIGHEST PROFILE */
4280        // Use largest supported profile for AVC recording if profile is not specified.
4281        for (OMX_VIDEO_AVCPROFILETYPE profile : {
4282                OMX_VIDEO_AVCProfileHigh, OMX_VIDEO_AVCProfileMain }) {
4283            if (verifySupportForProfileAndLevel(profile, 0) == OK) {
4284                h264type.eProfile = profile;
4285                break;
4286            }
4287        }
4288#endif
4289    }
4290
4291    ALOGI("setupAVCEncoderParameters with [profile: %s] [level: %s]",
4292            asString(h264type.eProfile), asString(h264type.eLevel));
4293
4294    if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) {
4295        h264type.nSliceHeaderSpacing = 0;
4296        h264type.bUseHadamard = OMX_TRUE;
4297        h264type.nRefFrames = 1;
4298        h264type.nBFrames = 0;
4299        h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
4300        if (h264type.nPFrames == 0) {
4301            h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
4302        }
4303        h264type.nRefIdx10ActiveMinus1 = 0;
4304        h264type.nRefIdx11ActiveMinus1 = 0;
4305        h264type.bEntropyCodingCABAC = OMX_FALSE;
4306        h264type.bWeightedPPrediction = OMX_FALSE;
4307        h264type.bconstIpred = OMX_FALSE;
4308        h264type.bDirect8x8Inference = OMX_FALSE;
4309        h264type.bDirectSpatialTemporal = OMX_FALSE;
4310        h264type.nCabacInitIdc = 0;
4311    } else if (h264type.eProfile == OMX_VIDEO_AVCProfileMain ||
4312            h264type.eProfile == OMX_VIDEO_AVCProfileHigh) {
4313        h264type.nSliceHeaderSpacing = 0;
4314        h264type.bUseHadamard = OMX_TRUE;
4315        h264type.nRefFrames = 2;
4316        h264type.nBFrames = 1;
4317        h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
4318        h264type.nAllowedPictureTypes =
4319            OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP | OMX_VIDEO_PictureTypeB;
4320        h264type.nRefIdx10ActiveMinus1 = 0;
4321        h264type.nRefIdx11ActiveMinus1 = 0;
4322        h264type.bEntropyCodingCABAC = OMX_TRUE;
4323        h264type.bWeightedPPrediction = OMX_TRUE;
4324        h264type.bconstIpred = OMX_TRUE;
4325        h264type.bDirect8x8Inference = OMX_TRUE;
4326        h264type.bDirectSpatialTemporal = OMX_TRUE;
4327        h264type.nCabacInitIdc = 1;
4328    }
4329
4330    if (h264type.nBFrames != 0) {
4331        h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB;
4332    }
4333
4334    h264type.bEnableUEP = OMX_FALSE;
4335    h264type.bEnableFMO = OMX_FALSE;
4336    h264type.bEnableASO = OMX_FALSE;
4337    h264type.bEnableRS = OMX_FALSE;
4338    h264type.bFrameMBsOnly = OMX_TRUE;
4339    h264type.bMBAFF = OMX_FALSE;
4340    h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
4341
4342    err = mOMX->setParameter(
4343            mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
4344
4345    if (err != OK) {
4346        return err;
4347    }
4348
4349    // TRICKY: if we are enabling temporal layering as well, some codecs may not support layering
4350    // when B-frames are enabled. Detect this now so we can disable B frames if temporal layering
4351    // is preferred.
4352    AString tsSchema;
4353    int32_t preferBFrames = (int32_t)false;
4354    if (msg->findString("ts-schema", &tsSchema)
4355            && (!msg->findInt32("android._prefer-b-frames", &preferBFrames) || !preferBFrames)) {
4356        OMX_VIDEO_PARAM_ANDROID_TEMPORALLAYERINGTYPE layering;
4357        InitOMXParams(&layering);
4358        layering.nPortIndex = kPortIndexOutput;
4359        if (mOMX->getParameter(
4360                        mNode, (OMX_INDEXTYPE)OMX_IndexParamAndroidVideoTemporalLayering,
4361                        &layering, sizeof(layering)) == OK
4362                && layering.eSupportedPatterns
4363                && layering.nBLayerCountMax == 0) {
4364            h264type.nBFrames = 0;
4365            h264type.nPFrames = setPFramesSpacing(iFrameInterval, frameRate, h264type.nBFrames);
4366            h264type.nAllowedPictureTypes &= ~OMX_VIDEO_PictureTypeB;
4367            ALOGI("disabling B-frames");
4368            err = mOMX->setParameter(
4369                    mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
4370
4371            if (err != OK) {
4372                return err;
4373            }
4374        }
4375    }
4376
4377    return configureBitrate(bitrate, bitrateMode);
4378}
4379
4380status_t ACodec::setupHEVCEncoderParameters(const sp<AMessage> &msg) {
4381    int32_t bitrate;
4382    float iFrameInterval;
4383    if (!msg->findInt32("bitrate", &bitrate)
4384            || !msg->findAsFloat("i-frame-interval", &iFrameInterval)) {
4385        return INVALID_OPERATION;
4386    }
4387
4388    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
4389
4390    float frameRate;
4391    if (!msg->findFloat("frame-rate", &frameRate)) {
4392        int32_t tmp;
4393        if (!msg->findInt32("frame-rate", &tmp)) {
4394            return INVALID_OPERATION;
4395        }
4396        frameRate = (float)tmp;
4397    }
4398
4399    OMX_VIDEO_PARAM_HEVCTYPE hevcType;
4400    InitOMXParams(&hevcType);
4401    hevcType.nPortIndex = kPortIndexOutput;
4402
4403    status_t err = OK;
4404    err = mOMX->getParameter(
4405            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
4406    if (err != OK) {
4407        return err;
4408    }
4409
4410    int32_t profile;
4411    if (msg->findInt32("profile", &profile)) {
4412        int32_t level;
4413        if (!msg->findInt32("level", &level)) {
4414            return INVALID_OPERATION;
4415        }
4416
4417        err = verifySupportForProfileAndLevel(profile, level);
4418        if (err != OK) {
4419            return err;
4420        }
4421
4422        hevcType.eProfile = static_cast<OMX_VIDEO_HEVCPROFILETYPE>(profile);
4423        hevcType.eLevel = static_cast<OMX_VIDEO_HEVCLEVELTYPE>(level);
4424    }
4425    // TODO: finer control?
4426    hevcType.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;
4427
4428    err = mOMX->setParameter(
4429            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoHevc, &hevcType, sizeof(hevcType));
4430    if (err != OK) {
4431        return err;
4432    }
4433
4434    return configureBitrate(bitrate, bitrateMode);
4435}
4436
4437status_t ACodec::setupVPXEncoderParameters(const sp<AMessage> &msg, sp<AMessage> &outputFormat) {
4438    int32_t bitrate;
4439    float iFrameInterval = 0;
4440    size_t tsLayers = 0;
4441    OMX_VIDEO_ANDROID_VPXTEMPORALLAYERPATTERNTYPE pattern =
4442        OMX_VIDEO_VPXTemporalLayerPatternNone;
4443    static const uint32_t kVp8LayerRateAlloction
4444        [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS]
4445        [OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS] = {
4446        {100, 100, 100},  // 1 layer
4447        { 60, 100, 100},  // 2 layers {60%, 40%}
4448        { 40,  60, 100},  // 3 layers {40%, 20%, 40%}
4449    };
4450    if (!msg->findInt32("bitrate", &bitrate)) {
4451        return INVALID_OPERATION;
4452    }
4453    msg->findAsFloat("i-frame-interval", &iFrameInterval);
4454
4455    OMX_VIDEO_CONTROLRATETYPE bitrateMode = getBitrateMode(msg);
4456
4457    float frameRate;
4458    if (!msg->findFloat("frame-rate", &frameRate)) {
4459        int32_t tmp;
4460        if (!msg->findInt32("frame-rate", &tmp)) {
4461            return INVALID_OPERATION;
4462        }
4463        frameRate = (float)tmp;
4464    }
4465
4466    AString tsSchema;
4467    OMX_VIDEO_ANDROID_TEMPORALLAYERINGPATTERNTYPE tsType =
4468        OMX_VIDEO_AndroidTemporalLayeringPatternNone;
4469
4470    if (msg->findString("ts-schema", &tsSchema)) {
4471        unsigned int numLayers = 0;
4472        unsigned int numBLayers = 0;
4473        int tags;
4474        char dummy;
4475        if (sscanf(tsSchema.c_str(), "webrtc.vp8.%u-layer%c", &numLayers, &dummy) == 1
4476                && numLayers > 0) {
4477            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
4478            tsType = OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC;
4479            tsLayers = numLayers;
4480        } else if ((tags = sscanf(tsSchema.c_str(), "android.generic.%u%c%u%c",
4481                        &numLayers, &dummy, &numBLayers, &dummy))
4482                && (tags == 1 || (tags == 3 && dummy == '+'))
4483                && numLayers > 0 && numLayers < UINT32_MAX - numBLayers) {
4484            pattern = OMX_VIDEO_VPXTemporalLayerPatternWebRTC;
4485            // VPX does not have a concept of B-frames, so just count all layers
4486            tsType = OMX_VIDEO_AndroidTemporalLayeringPatternAndroid;
4487            tsLayers = numLayers + numBLayers;
4488        } else {
4489            ALOGW("Ignoring unsupported ts-schema [%s]", tsSchema.c_str());
4490        }
4491        tsLayers = min(tsLayers, (size_t)OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS);
4492    }
4493
4494    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
4495    InitOMXParams(&vp8type);
4496    vp8type.nPortIndex = kPortIndexOutput;
4497    status_t err = mOMX->getParameter(
4498            mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
4499            &vp8type, sizeof(vp8type));
4500
4501    if (err == OK) {
4502        if (iFrameInterval > 0) {
4503            vp8type.nKeyFrameInterval = setPFramesSpacing(iFrameInterval, frameRate) + 1;
4504        }
4505        vp8type.eTemporalPattern = pattern;
4506        vp8type.nTemporalLayerCount = tsLayers;
4507        if (tsLayers > 0) {
4508            for (size_t i = 0; i < OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS; i++) {
4509                vp8type.nTemporalLayerBitrateRatio[i] =
4510                    kVp8LayerRateAlloction[tsLayers - 1][i];
4511            }
4512        }
4513        if (bitrateMode == OMX_Video_ControlRateConstant) {
4514            vp8type.nMinQuantizer = 2;
4515            vp8type.nMaxQuantizer = 63;
4516        }
4517
4518        err = mOMX->setParameter(
4519                mNode, (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
4520                &vp8type, sizeof(vp8type));
4521        if (err != OK) {
4522            ALOGW("Extended VP8 parameters set failed: %d", err);
4523        } else if (tsType == OMX_VIDEO_AndroidTemporalLayeringPatternWebRTC) {
4524            // advertise even single layer WebRTC layering, as it is defined
4525            outputFormat->setString("ts-schema", AStringPrintf("webrtc.vp8.%u-layer", tsLayers));
4526        } else if (tsLayers > 0) {
4527            // tsType == OMX_VIDEO_AndroidTemporalLayeringPatternAndroid
4528            outputFormat->setString("ts-schema", AStringPrintf("android.generic.%u", tsLayers));
4529        }
4530    }
4531
4532    return configureBitrate(bitrate, bitrateMode);
4533}
4534
4535status_t ACodec::verifySupportForProfileAndLevel(
4536        int32_t profile, int32_t level) {
4537    OMX_VIDEO_PARAM_PROFILELEVELTYPE params;
4538    InitOMXParams(&params);
4539    params.nPortIndex = kPortIndexOutput;
4540
4541    for (OMX_U32 index = 0; index <= kMaxIndicesToCheck; ++index) {
4542        params.nProfileIndex = index;
4543        status_t err = mOMX->getParameter(
4544                mNode,
4545                OMX_IndexParamVideoProfileLevelQuerySupported,
4546                &params,
4547                sizeof(params));
4548
4549        if (err != OK) {
4550            return err;
4551        }
4552
4553        int32_t supportedProfile = static_cast<int32_t>(params.eProfile);
4554        int32_t supportedLevel = static_cast<int32_t>(params.eLevel);
4555
4556        if (profile == supportedProfile && level <= supportedLevel) {
4557            return OK;
4558        }
4559
4560        if (index == kMaxIndicesToCheck) {
4561            ALOGW("[%s] stopping checking profiles after %u: %x/%x",
4562                    mComponentName.c_str(), index,
4563                    params.eProfile, params.eLevel);
4564        }
4565    }
4566    return ERROR_UNSUPPORTED;
4567}
4568
4569status_t ACodec::configureBitrate(
4570        int32_t bitrate, OMX_VIDEO_CONTROLRATETYPE bitrateMode) {
4571    OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
4572    InitOMXParams(&bitrateType);
4573    bitrateType.nPortIndex = kPortIndexOutput;
4574
4575    status_t err = mOMX->getParameter(
4576            mNode, OMX_IndexParamVideoBitrate,
4577            &bitrateType, sizeof(bitrateType));
4578
4579    if (err != OK) {
4580        return err;
4581    }
4582
4583    bitrateType.eControlRate = bitrateMode;
4584    bitrateType.nTargetBitrate = bitrate;
4585
4586    return mOMX->setParameter(
4587            mNode, OMX_IndexParamVideoBitrate,
4588            &bitrateType, sizeof(bitrateType));
4589}
4590
4591status_t ACodec::setupErrorCorrectionParameters() {
4592    OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
4593    InitOMXParams(&errorCorrectionType);
4594    errorCorrectionType.nPortIndex = kPortIndexOutput;
4595
4596    status_t err = mOMX->getParameter(
4597            mNode, OMX_IndexParamVideoErrorCorrection,
4598            &errorCorrectionType, sizeof(errorCorrectionType));
4599
4600    if (err != OK) {
4601        return OK;  // Optional feature. Ignore this failure
4602    }
4603
4604    errorCorrectionType.bEnableHEC = OMX_FALSE;
4605    errorCorrectionType.bEnableResync = OMX_TRUE;
4606    errorCorrectionType.nResynchMarkerSpacing = 256;
4607    errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
4608    errorCorrectionType.bEnableRVLC = OMX_FALSE;
4609
4610    return mOMX->setParameter(
4611            mNode, OMX_IndexParamVideoErrorCorrection,
4612            &errorCorrectionType, sizeof(errorCorrectionType));
4613}
4614
4615status_t ACodec::setVideoFormatOnPort(
4616        OMX_U32 portIndex,
4617        int32_t width, int32_t height, OMX_VIDEO_CODINGTYPE compressionFormat,
4618        float frameRate) {
4619    OMX_PARAM_PORTDEFINITIONTYPE def;
4620    InitOMXParams(&def);
4621    def.nPortIndex = portIndex;
4622
4623    OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
4624
4625    status_t err = mOMX->getParameter(
4626            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
4627    if (err != OK) {
4628        return err;
4629    }
4630
4631    if (portIndex == kPortIndexInput) {
4632        // XXX Need a (much) better heuristic to compute input buffer sizes.
4633        const size_t X = 64 * 1024;
4634        if (def.nBufferSize < X) {
4635            def.nBufferSize = X;
4636        }
4637    }
4638
4639    if (def.eDomain != OMX_PortDomainVideo) {
4640        ALOGE("expected video port, got %s(%d)", asString(def.eDomain), def.eDomain);
4641        return FAILED_TRANSACTION;
4642    }
4643
4644    video_def->nFrameWidth = width;
4645    video_def->nFrameHeight = height;
4646
4647    if (portIndex == kPortIndexInput) {
4648        video_def->eCompressionFormat = compressionFormat;
4649        video_def->eColorFormat = OMX_COLOR_FormatUnused;
4650        if (frameRate >= 0) {
4651            video_def->xFramerate = (OMX_U32)(frameRate * 65536.0f);
4652        }
4653    }
4654
4655    err = mOMX->setParameter(
4656            mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
4657
4658    return err;
4659}
4660
4661status_t ACodec::initNativeWindow() {
4662    if (mNativeWindow != NULL) {
4663        return mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_TRUE);
4664    }
4665
4666    mOMX->enableNativeBuffers(mNode, kPortIndexOutput, OMX_TRUE /* graphic */, OMX_FALSE);
4667    return OK;
4668}
4669
4670size_t ACodec::countBuffersOwnedByComponent(OMX_U32 portIndex) const {
4671    size_t n = 0;
4672
4673    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
4674        const BufferInfo &info = mBuffers[portIndex].itemAt(i);
4675
4676        if (info.mStatus == BufferInfo::OWNED_BY_COMPONENT) {
4677            ++n;
4678        }
4679    }
4680
4681    return n;
4682}
4683
4684size_t ACodec::countBuffersOwnedByNativeWindow() const {
4685    size_t n = 0;
4686
4687    for (size_t i = 0; i < mBuffers[kPortIndexOutput].size(); ++i) {
4688        const BufferInfo &info = mBuffers[kPortIndexOutput].itemAt(i);
4689
4690        if (info.mStatus == BufferInfo::OWNED_BY_NATIVE_WINDOW) {
4691            ++n;
4692        }
4693    }
4694
4695    return n;
4696}
4697
// Dequeues buffers from the native window until no more than the minimum
// number of undequeued buffers remain there, returning as many output
// buffers as possible to our control. No-op without an output surface.
void ACodec::waitUntilAllPossibleNativeWindowBuffersAreReturnedToUs() {
    if (mNativeWindow == NULL) {
        return;
    }

    // Stop when the window holds only its required minimum, or when
    // dequeueing fails (the window may legitimately refuse to give more).
    while (countBuffersOwnedByNativeWindow() > mNumUndequeuedBuffers
            && dequeueBufferFromNativeWindow() != NULL) {
        // these buffers will be submitted as regular buffers; account for this
        if (storingMetadataInDecodedBuffers() && mMetadataBuffersToSubmit > 0) {
            --mMetadataBuffersToSubmit;
        }
    }
}
4711
4712bool ACodec::allYourBuffersAreBelongToUs(
4713        OMX_U32 portIndex) {
4714    for (size_t i = 0; i < mBuffers[portIndex].size(); ++i) {
4715        BufferInfo *info = &mBuffers[portIndex].editItemAt(i);
4716
4717        if (info->mStatus != BufferInfo::OWNED_BY_US
4718                && info->mStatus != BufferInfo::OWNED_BY_NATIVE_WINDOW) {
4719            ALOGV("[%s] Buffer %u on port %u still has status %d",
4720                    mComponentName.c_str(),
4721                    info->mBufferID, portIndex, info->mStatus);
4722            return false;
4723        }
4724    }
4725
4726    return true;
4727}
4728
4729bool ACodec::allYourBuffersAreBelongToUs() {
4730    return allYourBuffersAreBelongToUs(kPortIndexInput)
4731        && allYourBuffersAreBelongToUs(kPortIndexOutput);
4732}
4733
// Queues a message for re-delivery later via processDeferredMessages(),
// typically once a state transition completes.
void ACodec::deferMessage(const sp<AMessage> &msg) {
    mDeferredQueue.push_back(msg);
}
4737
4738void ACodec::processDeferredMessages() {
4739    List<sp<AMessage> > queue = mDeferredQueue;
4740    mDeferredQueue.clear();
4741
4742    List<sp<AMessage> >::iterator it = queue.begin();
4743    while (it != queue.end()) {
4744        onMessageReceived(*it++);
4745    }
4746}
4747
// static
// Fills params.sMediaImage with a MediaImage2 plane layout for the common
// YUV420 color formats, used when the component does not implement the
// describeColorFormat OMX extension. Returns false (leaving mType UNKNOWN)
// for formats it cannot describe or for implausible stride/slice values.
bool ACodec::describeDefaultColorFormat(DescribeColorFormat2Params &params) {
    MediaImage2 &image = params.sMediaImage;
    memset(&image, 0, sizeof(image));

    // Start from "unknown" so early returns leave a well-defined result.
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    image.mNumPlanes = 0;

    const OMX_COLOR_FORMATTYPE fmt = params.eColorFormat;
    image.mWidth = params.nFrameWidth;
    image.mHeight = params.nFrameHeight;

    // only supporting YUV420
    if (fmt != OMX_COLOR_FormatYUV420Planar &&
        fmt != OMX_COLOR_FormatYUV420PackedPlanar &&
        fmt != OMX_COLOR_FormatYUV420SemiPlanar &&
        fmt != OMX_COLOR_FormatYUV420PackedSemiPlanar &&
        fmt != (OMX_COLOR_FORMATTYPE)HAL_PIXEL_FORMAT_YV12) {
        ALOGW("do not know color format 0x%x = %d", fmt, fmt);
        return false;
    }

    // TEMPORARY FIX for some vendors that advertise sliceHeight as 0
    if (params.nStride != 0 && params.nSliceHeight == 0) {
        ALOGW("using sliceHeight=%u instead of what codec advertised (=0)",
                params.nFrameHeight);
        params.nSliceHeight = params.nFrameHeight;
    }

    // we need stride and slice-height to be non-zero and sensible. These values were chosen to
    // prevent integer overflows further down the line, and do not indicate support for
    // 32kx32k video.
    if (params.nStride == 0 || params.nSliceHeight == 0
            || params.nStride > 32768 || params.nSliceHeight > 32768) {
        ALOGW("cannot describe color format 0x%x = %d with stride=%u and sliceHeight=%u",
                fmt, fmt, params.nStride, params.nSliceHeight);
        return false;
    }

    // set-up YUV format
    // The Y plane layout is the same for every supported format.
    image.mType = MediaImage2::MEDIA_IMAGE_TYPE_YUV;
    image.mNumPlanes = 3;
    image.mBitDepth = 8;
    image.mBitDepthAllocated = 8;
    image.mPlane[image.Y].mOffset = 0;
    image.mPlane[image.Y].mColInc = 1;
    image.mPlane[image.Y].mRowInc = params.nStride;
    image.mPlane[image.Y].mHorizSubsampling = 1;
    image.mPlane[image.Y].mVertSubsampling = 1;

    switch ((int)fmt) {
        case HAL_PIXEL_FORMAT_YV12:
            if (params.bUsingNativeBuffers) {
                // gralloc YV12: 16-byte-aligned strides, V plane before U.
                size_t ystride = align(params.nStride, 16);
                size_t cstride = align(params.nStride / 2, 16);
                image.mPlane[image.Y].mRowInc = ystride;

                image.mPlane[image.V].mOffset = ystride * params.nSliceHeight;
                image.mPlane[image.V].mColInc = 1;
                image.mPlane[image.V].mRowInc = cstride;
                image.mPlane[image.V].mHorizSubsampling = 2;
                image.mPlane[image.V].mVertSubsampling = 2;

                image.mPlane[image.U].mOffset = image.mPlane[image.V].mOffset
                        + (cstride * params.nSliceHeight / 2);
                image.mPlane[image.U].mColInc = 1;
                image.mPlane[image.U].mRowInc = cstride;
                image.mPlane[image.U].mHorizSubsampling = 2;
                image.mPlane[image.U].mVertSubsampling = 2;
                break;
            } else {
                // fall through as YV12 is used for YUV420Planar by some codecs
            }

        case OMX_COLOR_FormatYUV420Planar:
        case OMX_COLOR_FormatYUV420PackedPlanar:
            // Planar layout: Y plane, then U, then V; chroma stride is half
            // the luma stride.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 1;
            image.mPlane[image.U].mRowInc = params.nStride / 2;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset
                    + (params.nStride * params.nSliceHeight / 4);
            image.mPlane[image.V].mColInc = 1;
            image.mPlane[image.V].mRowInc = params.nStride / 2;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        case OMX_COLOR_FormatYUV420SemiPlanar:
            // FIXME: NV21 for sw-encoder, NV12 for decoder and hw-encoder
        case OMX_COLOR_FormatYUV420PackedSemiPlanar:
            // NV12
            // Semi-planar: interleaved UV plane after Y; V is one byte past U.
            image.mPlane[image.U].mOffset = params.nStride * params.nSliceHeight;
            image.mPlane[image.U].mColInc = 2;
            image.mPlane[image.U].mRowInc = params.nStride;
            image.mPlane[image.U].mHorizSubsampling = 2;
            image.mPlane[image.U].mVertSubsampling = 2;

            image.mPlane[image.V].mOffset = image.mPlane[image.U].mOffset + 1;
            image.mPlane[image.V].mColInc = 2;
            image.mPlane[image.V].mRowInc = params.nStride;
            image.mPlane[image.V].mHorizSubsampling = 2;
            image.mPlane[image.V].mVertSubsampling = 2;
            break;

        default:
            // Unreachable: the format was validated above.
            TRESPASS();
    }
    return true;
}
4860
// static
// Obtains a MediaImage2 description of the color format in |describeParams|
// from the component via the describeColorFormat (v1, preferred) or
// describeColorFormat2 OMX extensions, falling back to the built-in default
// YUV420 description when neither extension yields a known layout.
bool ACodec::describeColorFormat(
        const sp<IOMX> &omx, IOMX::node_id node,
        DescribeColorFormat2Params &describeParams)
{
    OMX_INDEXTYPE describeColorFormatIndex;
    if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat",
            &describeColorFormatIndex) == OK) {
        // v1 extension: query with the legacy struct, then convert to v2.
        DescribeColorFormatParams describeParamsV1(describeParams);
        if (omx->getParameter(
                node, describeColorFormatIndex,
                &describeParamsV1, sizeof(describeParamsV1)) == OK) {
            describeParams.initFromV1(describeParamsV1);
            return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
        }
    } else if (omx->getExtensionIndex(
            node, "OMX.google.android.index.describeColorFormat2", &describeColorFormatIndex) == OK
               && omx->getParameter(
            node, describeColorFormatIndex, &describeParams, sizeof(describeParams)) == OK) {
        return describeParams.sMediaImage.mType != MediaImage2::MEDIA_IMAGE_TYPE_UNKNOWN;
    }

    // No usable extension: synthesize the description for known YUV420 formats.
    return describeDefaultColorFormat(describeParams);
}
4886
4887// static
4888bool ACodec::isFlexibleColorFormat(
4889         const sp<IOMX> &omx, IOMX::node_id node,
4890         uint32_t colorFormat, bool usingNativeBuffers, OMX_U32 *flexibleEquivalent) {
4891    DescribeColorFormat2Params describeParams;
4892    InitOMXParams(&describeParams);
4893    describeParams.eColorFormat = (OMX_COLOR_FORMATTYPE)colorFormat;
4894    // reasonable dummy values
4895    describeParams.nFrameWidth = 128;
4896    describeParams.nFrameHeight = 128;
4897    describeParams.nStride = 128;
4898    describeParams.nSliceHeight = 128;
4899    describeParams.bUsingNativeBuffers = (OMX_BOOL)usingNativeBuffers;
4900
4901    CHECK(flexibleEquivalent != NULL);
4902
4903    if (!describeColorFormat(omx, node, describeParams)) {
4904        return false;
4905    }
4906
4907    const MediaImage2 &img = describeParams.sMediaImage;
4908    if (img.mType == MediaImage2::MEDIA_IMAGE_TYPE_YUV) {
4909        if (img.mNumPlanes != 3
4910                || img.mPlane[img.Y].mHorizSubsampling != 1
4911                || img.mPlane[img.Y].mVertSubsampling != 1) {
4912            return false;
4913        }
4914
4915        // YUV 420
4916        if (img.mPlane[img.U].mHorizSubsampling == 2
4917                && img.mPlane[img.U].mVertSubsampling == 2
4918                && img.mPlane[img.V].mHorizSubsampling == 2
4919                && img.mPlane[img.V].mVertSubsampling == 2) {
4920            // possible flexible YUV420 format
4921            if (img.mBitDepth <= 8) {
4922               *flexibleEquivalent = OMX_COLOR_FormatYUV420Flexible;
4923               return true;
4924            }
4925        }
4926    }
4927    return false;
4928}
4929
4930status_t ACodec::getPortFormat(OMX_U32 portIndex, sp<AMessage> &notify) {
4931    const char *niceIndex = portIndex == kPortIndexInput ? "input" : "output";
4932    OMX_PARAM_PORTDEFINITIONTYPE def;
4933    InitOMXParams(&def);
4934    def.nPortIndex = portIndex;
4935
4936    status_t err = mOMX->getParameter(mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
4937    if (err != OK) {
4938        return err;
4939    }
4940
4941    if (def.eDir != (portIndex == kPortIndexOutput ? OMX_DirOutput : OMX_DirInput)) {
4942        ALOGE("unexpected dir: %s(%d) on %s port", asString(def.eDir), def.eDir, niceIndex);
4943        return BAD_VALUE;
4944    }
4945
4946    switch (def.eDomain) {
4947        case OMX_PortDomainVideo:
4948        {
4949            OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
4950            switch ((int)videoDef->eCompressionFormat) {
4951                case OMX_VIDEO_CodingUnused:
4952                {
4953                    CHECK(mIsEncoder ^ (portIndex == kPortIndexOutput));
4954                    notify->setString("mime", MEDIA_MIMETYPE_VIDEO_RAW);
4955
4956                    notify->setInt32("stride", videoDef->nStride);
4957                    notify->setInt32("slice-height", videoDef->nSliceHeight);
4958                    notify->setInt32("color-format", videoDef->eColorFormat);
4959
4960                    if (mNativeWindow == NULL) {
4961                        DescribeColorFormat2Params describeParams;
4962                        InitOMXParams(&describeParams);
4963                        describeParams.eColorFormat = videoDef->eColorFormat;
4964                        describeParams.nFrameWidth = videoDef->nFrameWidth;
4965                        describeParams.nFrameHeight = videoDef->nFrameHeight;
4966                        describeParams.nStride = videoDef->nStride;
4967                        describeParams.nSliceHeight = videoDef->nSliceHeight;
4968                        describeParams.bUsingNativeBuffers = OMX_FALSE;
4969
4970                        if (describeColorFormat(mOMX, mNode, describeParams)) {
4971                            notify->setBuffer(
4972                                    "image-data",
4973                                    ABuffer::CreateAsCopy(
4974                                            &describeParams.sMediaImage,
4975                                            sizeof(describeParams.sMediaImage)));
4976
4977                            MediaImage2 &img = describeParams.sMediaImage;
4978                            MediaImage2::PlaneInfo *plane = img.mPlane;
4979                            ALOGV("[%s] MediaImage { F(%ux%u) @%u+%d+%d @%u+%d+%d @%u+%d+%d }",
4980                                    mComponentName.c_str(), img.mWidth, img.mHeight,
4981                                    plane[0].mOffset, plane[0].mColInc, plane[0].mRowInc,
4982                                    plane[1].mOffset, plane[1].mColInc, plane[1].mRowInc,
4983                                    plane[2].mOffset, plane[2].mColInc, plane[2].mRowInc);
4984                        }
4985                    }
4986
4987                    int32_t width = (int32_t)videoDef->nFrameWidth;
4988                    int32_t height = (int32_t)videoDef->nFrameHeight;
4989
4990                    if (portIndex == kPortIndexOutput) {
4991                        OMX_CONFIG_RECTTYPE rect;
4992                        InitOMXParams(&rect);
4993                        rect.nPortIndex = portIndex;
4994
4995                        if (mOMX->getConfig(
4996                                    mNode,
4997                                    (portIndex == kPortIndexOutput ?
4998                                            OMX_IndexConfigCommonOutputCrop :
4999                                            OMX_IndexConfigCommonInputCrop),
5000                                    &rect, sizeof(rect)) != OK) {
5001                            rect.nLeft = 0;
5002                            rect.nTop = 0;
5003                            rect.nWidth = videoDef->nFrameWidth;
5004                            rect.nHeight = videoDef->nFrameHeight;
5005                        }
5006
5007                        if (rect.nLeft < 0 ||
5008                            rect.nTop < 0 ||
5009                            rect.nLeft + rect.nWidth > videoDef->nFrameWidth ||
5010                            rect.nTop + rect.nHeight > videoDef->nFrameHeight) {
5011                            ALOGE("Wrong cropped rect (%d, %d) - (%u, %u) vs. frame (%u, %u)",
5012                                    rect.nLeft, rect.nTop,
5013                                    rect.nLeft + rect.nWidth, rect.nTop + rect.nHeight,
5014                                    videoDef->nFrameWidth, videoDef->nFrameHeight);
5015                            return BAD_VALUE;
5016                        }
5017
5018                        notify->setRect(
5019                                "crop",
5020                                rect.nLeft,
5021                                rect.nTop,
5022                                rect.nLeft + rect.nWidth - 1,
5023                                rect.nTop + rect.nHeight - 1);
5024
5025                        width = rect.nWidth;
5026                        height = rect.nHeight;
5027
5028                        android_dataspace dataSpace = HAL_DATASPACE_UNKNOWN;
5029                        (void)getColorAspectsAndDataSpaceForVideoDecoder(
5030                                width, height, mConfigFormat, notify,
5031                                mUsingNativeWindow ? &dataSpace : NULL);
5032                        if (mUsingNativeWindow) {
5033                            notify->setInt32("android._dataspace", dataSpace);
5034                        }
5035                        (void)getHDRStaticInfoForVideoCodec(kPortIndexOutput, notify);
5036                    } else {
5037                        (void)getInputColorAspectsForVideoEncoder(notify);
5038                        if (mConfigFormat->contains("hdr-static-info")) {
5039                            (void)getHDRStaticInfoForVideoCodec(kPortIndexInput, notify);
5040                        }
5041                    }
5042
5043                    break;
5044                }
5045
5046                case OMX_VIDEO_CodingVP8:
5047                case OMX_VIDEO_CodingVP9:
5048                {
5049                    OMX_VIDEO_PARAM_ANDROID_VP8ENCODERTYPE vp8type;
5050                    InitOMXParams(&vp8type);
5051                    vp8type.nPortIndex = kPortIndexOutput;
5052                    status_t err = mOMX->getParameter(
5053                            mNode,
5054                            (OMX_INDEXTYPE)OMX_IndexParamVideoAndroidVp8Encoder,
5055                            &vp8type,
5056                            sizeof(vp8type));
5057
5058                    if (err == OK) {
5059                        if (vp8type.eTemporalPattern == OMX_VIDEO_VPXTemporalLayerPatternWebRTC
5060                                && vp8type.nTemporalLayerCount > 0
5061                                && vp8type.nTemporalLayerCount
5062                                        <= OMX_VIDEO_ANDROID_MAXVP8TEMPORALLAYERS) {
5063                            // advertise as android.generic if we configured for android.generic
5064                            AString origSchema;
5065                            if (notify->findString("ts-schema", &origSchema)
5066                                    && origSchema.startsWith("android.generic")) {
5067                                notify->setString("ts-schema", AStringPrintf(
5068                                        "android.generic.%u", vp8type.nTemporalLayerCount));
5069                            } else {
5070                                notify->setString("ts-schema", AStringPrintf(
5071                                        "webrtc.vp8.%u-layer", vp8type.nTemporalLayerCount));
5072                            }
5073                        }
5074                    }
5075                    // Fall through to set up mime.
5076                }
5077
5078                default:
5079                {
5080                    if (mIsEncoder ^ (portIndex == kPortIndexOutput)) {
5081                        // should be CodingUnused
5082                        ALOGE("Raw port video compression format is %s(%d)",
5083                                asString(videoDef->eCompressionFormat),
5084                                videoDef->eCompressionFormat);
5085                        return BAD_VALUE;
5086                    }
5087                    AString mime;
5088                    if (GetMimeTypeForVideoCoding(
5089                        videoDef->eCompressionFormat, &mime) != OK) {
5090                        notify->setString("mime", "application/octet-stream");
5091                    } else {
5092                        notify->setString("mime", mime.c_str());
5093                    }
5094                    uint32_t intraRefreshPeriod = 0;
5095                    if (mIsEncoder && getIntraRefreshPeriod(&intraRefreshPeriod) == OK
5096                            && intraRefreshPeriod > 0) {
5097                        notify->setInt32("intra-refresh-period", intraRefreshPeriod);
5098                    }
5099                    break;
5100                }
5101            }
5102            notify->setInt32("width", videoDef->nFrameWidth);
5103            notify->setInt32("height", videoDef->nFrameHeight);
5104            ALOGV("[%s] %s format is %s", mComponentName.c_str(),
5105                    portIndex == kPortIndexInput ? "input" : "output",
5106                    notify->debugString().c_str());
5107
5108            break;
5109        }
5110
5111        case OMX_PortDomainAudio:
5112        {
5113            OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;
5114
5115            switch ((int)audioDef->eEncoding) {
5116                case OMX_AUDIO_CodingPCM:
5117                {
5118                    OMX_AUDIO_PARAM_PCMMODETYPE params;
5119                    InitOMXParams(&params);
5120                    params.nPortIndex = portIndex;
5121
5122                    err = mOMX->getParameter(
5123                            mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
5124                    if (err != OK) {
5125                        return err;
5126                    }
5127
5128                    if (params.nChannels <= 0
5129                            || (params.nChannels != 1 && !params.bInterleaved)
5130                            || params.ePCMMode != OMX_AUDIO_PCMModeLinear) {
5131                        ALOGE("unsupported PCM port: %u channels%s, %u-bit",
5132                                params.nChannels,
5133                                params.bInterleaved ? " interleaved" : "",
5134                                params.nBitPerSample);
5135                        return FAILED_TRANSACTION;
5136                    }
5137
5138                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_RAW);
5139                    notify->setInt32("channel-count", params.nChannels);
5140                    notify->setInt32("sample-rate", params.nSamplingRate);
5141
5142                    AudioEncoding encoding = kAudioEncodingPcm16bit;
5143                    if (params.eNumData == OMX_NumericalDataUnsigned
5144                            && params.nBitPerSample == 8u) {
5145                        encoding = kAudioEncodingPcm8bit;
5146                    } else if (params.eNumData == OMX_NumericalDataFloat
5147                            && params.nBitPerSample == 32u) {
5148                        encoding = kAudioEncodingPcmFloat;
5149                    } else if (params.nBitPerSample != 16u
5150                            || params.eNumData != OMX_NumericalDataSigned) {
5151                        ALOGE("unsupported PCM port: %s(%d), %s(%d) mode ",
5152                                asString(params.eNumData), params.eNumData,
5153                                asString(params.ePCMMode), params.ePCMMode);
5154                        return FAILED_TRANSACTION;
5155                    }
5156                    notify->setInt32("pcm-encoding", encoding);
5157
5158                    if (mChannelMaskPresent) {
5159                        notify->setInt32("channel-mask", mChannelMask);
5160                    }
5161                    break;
5162                }
5163
5164                case OMX_AUDIO_CodingAAC:
5165                {
5166                    OMX_AUDIO_PARAM_AACPROFILETYPE params;
5167                    InitOMXParams(&params);
5168                    params.nPortIndex = portIndex;
5169
5170                    err = mOMX->getParameter(
5171                            mNode, OMX_IndexParamAudioAac, &params, sizeof(params));
5172                    if (err != OK) {
5173                        return err;
5174                    }
5175
5176                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AAC);
5177                    notify->setInt32("channel-count", params.nChannels);
5178                    notify->setInt32("sample-rate", params.nSampleRate);
5179                    break;
5180                }
5181
5182                case OMX_AUDIO_CodingAMR:
5183                {
5184                    OMX_AUDIO_PARAM_AMRTYPE params;
5185                    InitOMXParams(&params);
5186                    params.nPortIndex = portIndex;
5187
5188                    err = mOMX->getParameter(
5189                            mNode, OMX_IndexParamAudioAmr, &params, sizeof(params));
5190                    if (err != OK) {
5191                        return err;
5192                    }
5193
5194                    notify->setInt32("channel-count", 1);
5195                    if (params.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0) {
5196                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_WB);
5197                        notify->setInt32("sample-rate", 16000);
5198                    } else {
5199                        notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AMR_NB);
5200                        notify->setInt32("sample-rate", 8000);
5201                    }
5202                    break;
5203                }
5204
5205                case OMX_AUDIO_CodingFLAC:
5206                {
5207                    OMX_AUDIO_PARAM_FLACTYPE params;
5208                    InitOMXParams(&params);
5209                    params.nPortIndex = portIndex;
5210
5211                    err = mOMX->getParameter(
5212                            mNode, OMX_IndexParamAudioFlac, &params, sizeof(params));
5213                    if (err != OK) {
5214                        return err;
5215                    }
5216
5217                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_FLAC);
5218                    notify->setInt32("channel-count", params.nChannels);
5219                    notify->setInt32("sample-rate", params.nSampleRate);
5220                    break;
5221                }
5222
5223                case OMX_AUDIO_CodingMP3:
5224                {
5225                    OMX_AUDIO_PARAM_MP3TYPE params;
5226                    InitOMXParams(&params);
5227                    params.nPortIndex = portIndex;
5228
5229                    err = mOMX->getParameter(
5230                            mNode, OMX_IndexParamAudioMp3, &params, sizeof(params));
5231                    if (err != OK) {
5232                        return err;
5233                    }
5234
5235                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_MPEG);
5236                    notify->setInt32("channel-count", params.nChannels);
5237                    notify->setInt32("sample-rate", params.nSampleRate);
5238                    break;
5239                }
5240
5241                case OMX_AUDIO_CodingVORBIS:
5242                {
5243                    OMX_AUDIO_PARAM_VORBISTYPE params;
5244                    InitOMXParams(&params);
5245                    params.nPortIndex = portIndex;
5246
5247                    err = mOMX->getParameter(
5248                            mNode, OMX_IndexParamAudioVorbis, &params, sizeof(params));
5249                    if (err != OK) {
5250                        return err;
5251                    }
5252
5253                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_VORBIS);
5254                    notify->setInt32("channel-count", params.nChannels);
5255                    notify->setInt32("sample-rate", params.nSampleRate);
5256                    break;
5257                }
5258
5259                case OMX_AUDIO_CodingAndroidAC3:
5260                {
5261                    OMX_AUDIO_PARAM_ANDROID_AC3TYPE params;
5262                    InitOMXParams(&params);
5263                    params.nPortIndex = portIndex;
5264
5265                    err = mOMX->getParameter(
5266                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
5267                            &params, sizeof(params));
5268                    if (err != OK) {
5269                        return err;
5270                    }
5271
5272                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_AC3);
5273                    notify->setInt32("channel-count", params.nChannels);
5274                    notify->setInt32("sample-rate", params.nSampleRate);
5275                    break;
5276                }
5277
5278                case OMX_AUDIO_CodingAndroidEAC3:
5279                {
5280                    OMX_AUDIO_PARAM_ANDROID_EAC3TYPE params;
5281                    InitOMXParams(&params);
5282                    params.nPortIndex = portIndex;
5283
5284                    err = mOMX->getParameter(
5285                            mNode, (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidEac3,
5286                            &params, sizeof(params));
5287                    if (err != OK) {
5288                        return err;
5289                    }
5290
5291                    notify->setString("mime", MEDIA_MIMETYPE_AUDIO_EAC3);
5292                    notify->setInt32("channel-count", params.nChannels);
5293                    notify->setInt32("sample-rate", params.nSampleRate);
5294                    break;
5295                }
5296
5297                case OMX_AUDIO_CodingAndroidOPUS:
5298                {
5299                    OMX_AUDIO_PARAM_ANDROID_OPUSTYPE params;
5300                    InitOMXParams(&params);
5301                    params.nPortIndex = portIndex;
5302