// SoftAVC.cpp revision 486247017e0bfb9db9994a53a28c0dacfebf8567
/*
 * Copyright (C) 2011 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

//#define LOG_NDEBUG 0
#define LOG_TAG "SoftAVC"
#include <utils/Log.h>

#include "SoftAVC.h"

#include <media/stagefright/foundation/ADebug.h>
#include <media/stagefright/MediaDefs.h>
#include <media/stagefright/MediaErrors.h>


namespace android {

// Fills in the size and OMX IL version header fields that every OMX
// parameter/config struct carries.
template<class T>
static void InitOMXParams(T *params) {
    params->nSize = sizeof(T);
    params->nVersion.s.nVersionMajor = 1;
    params->nVersion.s.nVersionMinor = 0;
    params->nVersion.s.nRevision = 0;
    params->nVersion.s.nStep = 0;
}

// Constructs the software AVC decoder component.
// Defaults to 320x240 until the first sequence header is decoded; the
// crop rectangle starts out covering the full default frame.
// Aborts (CHECK) if the underlying H264SwDec library fails to initialize.
SoftAVC::SoftAVC(
        const char *name,
        const OMX_CALLBACKTYPE *callbacks,
        OMX_PTR appData,
        OMX_COMPONENTTYPE **component)
    : SimpleSoftOMXComponent(name, callbacks, appData, component),
      mHandle(NULL),
      mInputBufferCount(0),
      mWidth(320),
      mHeight(240),
      mPictureSize(mWidth * mHeight * 3 / 2),  // YUV420: 1.5 bytes per pixel
      mCropLeft(0),
      mCropTop(0),
      mCropWidth(mWidth),
      mCropHeight(mHeight),
      mFirstPicture(NULL),
      mFirstPictureId(-1),
      mPicId(0),
      mHeadersDecoded(false),
      mEOSStatus(INPUT_DATA_AVAILABLE),
      mOutputPortSettingsChange(NONE) {
    initPorts();
    CHECK_EQ(initDecoder(), (status_t)OK);
}

// Releases the decoder instance, frees any timestamp/flag headers still
// pending in mPicToHeaderMap, and verifies both port queues are empty
// (all buffers must have been returned before destruction).
SoftAVC::~SoftAVC() {
    H264SwDecRelease(mHandle);
    mHandle = NULL;

    // Each map entry owns a heap-allocated OMX_BUFFERHEADERTYPE copy.
    while (mPicToHeaderMap.size() != 0) {
        OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.editValueAt(0);
        mPicToHeaderMap.removeItemsAt(0);
        delete header;
        header = NULL;
    }
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    CHECK(outQueue.empty());
    CHECK(inQueue.empty());

    delete[] mFirstPicture;
}

// Registers the two OMX ports: a compressed AVC input port and a raw
// YUV420-planar output port sized for the current frame dimensions.
void SoftAVC::initPorts() {
    OMX_PARAM_PORTDEFINITIONTYPE def;
    InitOMXParams(&def);

    // Input port: compressed AVC bitstream buffers.
    def.nPortIndex = kInputPortIndex;
    def.eDir = OMX_DirInput;
    def.nBufferCountMin = kNumInputBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.nBufferSize = 8192;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 1;

    def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_AVC);
    def.format.video.pNativeRender = NULL;
    def.format.video.nFrameWidth = mWidth;
    def.format.video.nFrameHeight = mHeight;
    def.format.video.nStride = def.format.video.nFrameWidth;
    def.format.video.nSliceHeight = def.format.video.nFrameHeight;
    def.format.video.nBitrate = 0;
    def.format.video.xFramerate = 0;
    def.format.video.bFlagErrorConcealment = OMX_FALSE;
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingAVC;
    def.format.video.eColorFormat = OMX_COLOR_FormatUnused;
    def.format.video.pNativeWindow = NULL;

    addPort(def);

    // Output port: uncompressed YUV420 planar frames.
    def.nPortIndex = kOutputPortIndex;
    def.eDir = OMX_DirOutput;
    def.nBufferCountMin = kNumOutputBuffers;
    def.nBufferCountActual = def.nBufferCountMin;
    def.bEnabled = OMX_TRUE;
    def.bPopulated = OMX_FALSE;
    def.eDomain = OMX_PortDomainVideo;
    def.bBuffersContiguous = OMX_FALSE;
    def.nBufferAlignment = 2;

    def.format.video.cMIMEType = const_cast<char *>(MEDIA_MIMETYPE_VIDEO_RAW);
    def.format.video.pNativeRender = NULL;
    def.format.video.nFrameWidth = mWidth;
    def.format.video.nFrameHeight = mHeight;
    def.format.video.nStride = def.format.video.nFrameWidth;
    def.format.video.nSliceHeight = def.format.video.nFrameHeight;
    def.format.video.nBitrate = 0;
    def.format.video.xFramerate = 0;
    def.format.video.bFlagErrorConcealment = OMX_FALSE;
    def.format.video.eCompressionFormat = OMX_VIDEO_CodingUnused;
    def.format.video.eColorFormat = OMX_COLOR_FormatYUV420Planar;
    def.format.video.pNativeWindow = NULL;

    // One full YUV420 frame per output buffer.
    def.nBufferSize =
        (def.format.video.nFrameWidth * def.format.video.nFrameHeight * 3) / 2;

    addPort(def);
}

// Creates the H264SwDec instance.
// Returns OK on success, UNKNOWN_ERROR otherwise.
status_t SoftAVC::initDecoder() {
    // Force decoder to output buffers in display order.
    if (H264SwDecInit(&mHandle, 0) == H264SWDEC_OK) {
        return OK;
    }
    return UNKNOWN_ERROR;
}

// Reports the (single) supported format for each port: AVC on input,
// YUV420 planar on output. Any nIndex beyond 0 yields OMX_ErrorNoMore;
// everything else is delegated to the base class.
OMX_ERRORTYPE SoftAVC::internalGetParameter(
        OMX_INDEXTYPE index, OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > kOutputPortIndex) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex != 0) {
                return OMX_ErrorNoMore;
            }

            if (formatParams->nPortIndex == kInputPortIndex) {
                formatParams->eCompressionFormat = OMX_VIDEO_CodingAVC;
                formatParams->eColorFormat = OMX_COLOR_FormatUnused;
                formatParams->xFramerate = 0;
            } else {
                CHECK(formatParams->nPortIndex == kOutputPortIndex);

                formatParams->eCompressionFormat = OMX_VIDEO_CodingUnused;
                formatParams->eColorFormat = OMX_COLOR_FormatYUV420Planar;
                formatParams->xFramerate = 0;
            }

            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalGetParameter(index, params);
    }
}

// Accepts only the "video_decoder.avc" role and validates (but otherwise
// ignores) port-format requests; other indices go to the base class.
OMX_ERRORTYPE SoftAVC::internalSetParameter(
        OMX_INDEXTYPE index, const OMX_PTR params) {
    switch (index) {
        case OMX_IndexParamStandardComponentRole:
        {
            const OMX_PARAM_COMPONENTROLETYPE *roleParams =
                (const OMX_PARAM_COMPONENTROLETYPE *)params;

            if (strncmp((const char *)roleParams->cRole,
                        "video_decoder.avc",
                        OMX_MAX_STRINGNAME_SIZE - 1)) {
                return OMX_ErrorUndefined;
            }

            return OMX_ErrorNone;
        }

        case OMX_IndexParamVideoPortFormat:
        {
            OMX_VIDEO_PARAM_PORTFORMATTYPE *formatParams =
                (OMX_VIDEO_PARAM_PORTFORMATTYPE *)params;

            if (formatParams->nPortIndex > kOutputPortIndex) {
                return OMX_ErrorUndefined;
            }

            if (formatParams->nIndex != 0) {
                return OMX_ErrorNoMore;
            }

            // The requested format is not stored; the ports only ever
            // expose the single format advertised above.
            return OMX_ErrorNone;
        }

        default:
            return SimpleSoftOMXComponent::internalSetParameter(index, params);
    }
}

// Exposes the current output crop rectangle (updated from the SPS
// cropping parameters in handleCropRectEvent). Only valid on port 1.
OMX_ERRORTYPE SoftAVC::getConfig(
        OMX_INDEXTYPE index, OMX_PTR params) {
    switch (index) {
        case OMX_IndexConfigCommonOutputCrop:
        {
            OMX_CONFIG_RECTTYPE *rectParams = (OMX_CONFIG_RECTTYPE *)params;

            if (rectParams->nPortIndex != 1) {
                return OMX_ErrorUndefined;
            }

            rectParams->nLeft = mCropLeft;
            rectParams->nTop = mCropTop;
            rectParams->nWidth = mCropWidth;
            rectParams->nHeight = mCropHeight;

            return OMX_ErrorNone;
        }

        default:
            return OMX_ErrorUnsupportedIndex;
    }
}

// Main work loop: consumes queued input buffers, feeds them to the
// decoder, and drains decoded pictures into output buffers.
// Bails out early while a port reconfiguration is in flight or after
// the final frames have been flushed at EOS.
void SoftAVC::onQueueFilled(OMX_U32 portIndex) {
    if (mOutputPortSettingsChange != NONE) {
        return;
    }

    if (mEOSStatus == OUTPUT_FRAMES_FLUSHED) {
        return;
    }

    List<BufferInfo *> &inQueue = getPortQueue(kInputPortIndex);
    List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex);
    H264SwDecRet ret = H264SWDEC_PIC_RDY;
    status_t err = OK;
    bool portSettingsChanged = false;
    // Note: decoding proceeds only while this component owns ALL output
    // buffers (outQueue.size() == kNumOutputBuffers).
    while ((mEOSStatus != INPUT_DATA_AVAILABLE || !inQueue.empty())
            && outQueue.size() == kNumOutputBuffers) {

        if (mEOSStatus == INPUT_EOS_SEEN) {
            drainAllOutputBuffers();
            return;
        }

        BufferInfo *inInfo = *inQueue.begin();
        OMX_BUFFERHEADERTYPE *inHeader = inInfo->mHeader;
        ++mPicId;
        // An EOS-flagged input buffer is returned immediately; any
        // payload it carries is not decoded.
        if (inHeader->nFlags & OMX_BUFFERFLAG_EOS) {
            inQueue.erase(inQueue.begin());
            inInfo->mOwnedByUs = false;
            notifyEmptyBufferDone(inHeader);
            mEOSStatus = INPUT_EOS_SEEN;
            continue;
        }

        // Stash the input buffer's timestamp/flags under this picture id
        // so they can be restored onto the matching output buffer, which
        // may be produced much later (display-order reordering).
        OMX_BUFFERHEADERTYPE *header = new OMX_BUFFERHEADERTYPE;
        memset(header, 0, sizeof(OMX_BUFFERHEADERTYPE));
        header->nTimeStamp = inHeader->nTimeStamp;
        header->nFlags = inHeader->nFlags;
        mPicToHeaderMap.add(mPicId, header);
        inQueue.erase(inQueue.begin());

        H264SwDecInput inPicture;
        H264SwDecOutput outPicture;
        memset(&inPicture, 0, sizeof(inPicture));
        inPicture.dataLen = inHeader->nFilledLen;
        inPicture.pStream = inHeader->pBuffer + inHeader->nOffset;
        inPicture.picId = mPicId;
        // NOTE(review): 1 presumably selects the decoder's intra error
        // concealment mode -- confirm against the H264SwDec API docs.
        inPicture.intraConcealmentMethod = 1;
        H264SwDecPicture decodedPicture;

        // Keep decoding until the whole input buffer has been consumed.
        while (inPicture.dataLen > 0) {
            ret = H264SwDecDecode(mHandle, &inPicture, &outPicture);
            if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY ||
                ret == H264SWDEC_PIC_RDY_BUFF_NOT_EMPTY) {
                // Advance past the bytes the decoder consumed.
                inPicture.dataLen -= (u32)(outPicture.pStrmCurrPos - inPicture.pStream);
                inPicture.pStream = outPicture.pStrmCurrPos;
                if (ret == H264SWDEC_HDRS_RDY_BUFF_NOT_EMPTY) {
                    // New sequence headers: dimensions and/or crop may
                    // have changed, possibly triggering an OMX port
                    // reconfiguration.
                    mHeadersDecoded = true;
                    H264SwDecInfo decoderInfo;
                    CHECK(H264SwDecGetInfo(mHandle, &decoderInfo) == H264SWDEC_OK);

                    if (handlePortSettingChangeEvent(&decoderInfo)) {
                        portSettingsChanged = true;
                    }

                    if (decoderInfo.croppingFlag &&
                        handleCropRectEvent(&decoderInfo.cropParams)) {
                        portSettingsChanged = true;
                    }
                }
            } else {
                if (portSettingsChanged) {
                    if (H264SwDecNextPicture(mHandle, &decodedPicture, 0)
                            == H264SWDEC_PIC_RDY) {

                        // Save this output buffer; otherwise, it will be
                        // lost during dynamic port reconfiguration because
                        // OpenMAX client will delete _all_ output buffers
                        // in the process.
                        saveFirstOutputBuffer(
                                decodedPicture.picId,
                                (uint8_t *)decodedPicture.pOutputPicture);
                    }
                }
                // Stop consuming this input buffer; negative codes are
                // decode errors reported to the client below.
                inPicture.dataLen = 0;
                if (ret < 0) {
                    LOGE("Decoder failed: %d", ret);
                    err = ERROR_MALFORMED;
                }
            }
        }
        inInfo->mOwnedByUs = false;
        notifyEmptyBufferDone(inHeader);

        // A reconfiguration was signalled: stop here and wait for the
        // client to disable/re-enable the output port.
        if (portSettingsChanged) {
            portSettingsChanged = false;
            return;
        }

        // First, flush the picture saved across the reconfiguration.
        if (mFirstPicture && !outQueue.empty()) {
            drainOneOutputBuffer(mFirstPictureId, mFirstPicture);
            delete[] mFirstPicture;
            mFirstPicture = NULL;
            mFirstPictureId = -1;
        }

        // Then drain every picture the decoder has ready.
        while (!outQueue.empty() &&
                mHeadersDecoded &&
                H264SwDecNextPicture(mHandle, &decodedPicture, 0)
                    == H264SWDEC_PIC_RDY) {

            int32_t picId = decodedPicture.picId;
            uint8_t *data = (uint8_t *) decodedPicture.pOutputPicture;
            drainOneOutputBuffer(picId, data);
        }

        if (err != OK) {
            notify(OMX_EventError, OMX_ErrorUndefined, err, NULL);
        }
    }
}

// If the stream dimensions changed, updates the cached geometry and the
// port definitions, notifies the client, and enters the
// AWAITING_DISABLED reconfiguration state.
// Returns true iff a port settings change was signalled.
bool SoftAVC::handlePortSettingChangeEvent(const H264SwDecInfo *info) {
    if (mWidth != info->picWidth || mHeight != info->picHeight) {
        mWidth = info->picWidth;
        mHeight = info->picHeight;
        mPictureSize = mWidth * mHeight * 3 / 2;
        mCropWidth = mWidth;
        mCropHeight = mHeight;
        updatePortDefinitions();
        notify(OMX_EventPortSettingsChanged, 1, 0, NULL);
        mOutputPortSettingsChange = AWAITING_DISABLED;
        return true;
    }

    return false;
}

// If the stream's crop rectangle changed, updates the cached crop and
// notifies the client (OMX_IndexConfigCommonOutputCrop on port 1).
// Returns true iff a change was signalled.
bool SoftAVC::handleCropRectEvent(const CropParams *crop) {
    if (mCropLeft != crop->cropLeftOffset ||
        mCropTop != crop->cropTopOffset ||
        mCropWidth != crop->cropOutWidth ||
        mCropHeight != crop->cropOutHeight) {
        mCropLeft = crop->cropLeftOffset;
        mCropTop = crop->cropTopOffset;
        mCropWidth = crop->cropOutWidth;
        mCropHeight = crop->cropOutHeight;

        notify(OMX_EventPortSettingsChanged, 1,
               OMX_IndexConfigCommonOutputCrop, NULL);

        return true;
    }
    return false;
}

402void SoftAVC::saveFirstOutputBuffer(int32_t picId, uint8_t *data) { 403 CHECK(mFirstPicture == NULL); 404 mFirstPictureId = picId; 405 406 mFirstPicture = new uint8_t[mPictureSize]; 407 memcpy(mFirstPicture, data, mPictureSize); 408} 409 410void SoftAVC::drainOneOutputBuffer(int32_t picId, uint8_t* data) { 411 List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex); 412 BufferInfo *outInfo = *outQueue.begin(); 413 outQueue.erase(outQueue.begin()); 414 OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; 415 OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId); 416 outHeader->nTimeStamp = header->nTimeStamp; 417 outHeader->nFlags = header->nFlags; 418 outHeader->nFilledLen = mPictureSize; 419 memcpy(outHeader->pBuffer + outHeader->nOffset, 420 data, mPictureSize); 421 mPicToHeaderMap.removeItem(picId); 422 delete header; 423 outInfo->mOwnedByUs = false; 424 notifyFillBufferDone(outHeader); 425} 426 427bool SoftAVC::drainAllOutputBuffers() { 428 List<BufferInfo *> &outQueue = getPortQueue(kOutputPortIndex); 429 H264SwDecPicture decodedPicture; 430 431 while (!outQueue.empty()) { 432 BufferInfo *outInfo = *outQueue.begin(); 433 outQueue.erase(outQueue.begin()); 434 OMX_BUFFERHEADERTYPE *outHeader = outInfo->mHeader; 435 if (mHeadersDecoded && 436 H264SWDEC_PIC_RDY == 437 H264SwDecNextPicture(mHandle, &decodedPicture, 1 /* flush */)) { 438 439 int32_t picId = decodedPicture.picId; 440 CHECK(mPicToHeaderMap.indexOfKey(picId) >= 0); 441 442 memcpy(outHeader->pBuffer + outHeader->nOffset, 443 decodedPicture.pOutputPicture, 444 mPictureSize); 445 446 OMX_BUFFERHEADERTYPE *header = mPicToHeaderMap.valueFor(picId); 447 outHeader->nTimeStamp = header->nTimeStamp; 448 outHeader->nFlags = header->nFlags; 449 outHeader->nFilledLen = mPictureSize; 450 mPicToHeaderMap.removeItem(picId); 451 delete header; 452 } else { 453 outHeader->nTimeStamp = 0; 454 outHeader->nFilledLen = 0; 455 outHeader->nFlags = OMX_BUFFERFLAG_EOS; 456 mEOSStatus = OUTPUT_FRAMES_FLUSHED; 
457 } 458 459 outInfo->mOwnedByUs = false; 460 notifyFillBufferDone(outHeader); 461 } 462 463 return true; 464} 465 466void SoftAVC::onPortFlushCompleted(OMX_U32 portIndex) { 467 if (portIndex == kInputPortIndex) { 468 mEOSStatus = INPUT_DATA_AVAILABLE; 469 } 470} 471 472void SoftAVC::onPortEnableCompleted(OMX_U32 portIndex, bool enabled) { 473 switch (mOutputPortSettingsChange) { 474 case NONE: 475 break; 476 477 case AWAITING_DISABLED: 478 { 479 CHECK(!enabled); 480 mOutputPortSettingsChange = AWAITING_ENABLED; 481 break; 482 } 483 484 default: 485 { 486 CHECK_EQ((int)mOutputPortSettingsChange, (int)AWAITING_ENABLED); 487 CHECK(enabled); 488 mOutputPortSettingsChange = NONE; 489 break; 490 } 491 } 492} 493 494void SoftAVC::updatePortDefinitions() { 495 OMX_PARAM_PORTDEFINITIONTYPE *def = &editPortInfo(0)->mDef; 496 def->format.video.nFrameWidth = mWidth; 497 def->format.video.nFrameHeight = mHeight; 498 def->format.video.nStride = def->format.video.nFrameWidth; 499 def->format.video.nSliceHeight = def->format.video.nFrameHeight; 500 501 def = &editPortInfo(1)->mDef; 502 def->format.video.nFrameWidth = mWidth; 503 def->format.video.nFrameHeight = mHeight; 504 def->format.video.nStride = def->format.video.nFrameWidth; 505 def->format.video.nSliceHeight = def->format.video.nFrameHeight; 506 507 def->nBufferSize = 508 (def->format.video.nFrameWidth 509 * def->format.video.nFrameHeight * 3) / 2; 510} 511 512} // namespace android 513 514android::SoftOMXComponent *createSoftOMXComponent( 515 const char *name, const OMX_CALLBACKTYPE *callbacks, 516 OMX_PTR appData, OMX_COMPONENTTYPE **component) { 517 return new android::SoftAVC(name, callbacks, appData, component); 518} 519