Searched defs:frame (Results 1 - 25 of 103) sorted by path


/frameworks/av/cmds/stagefright/
jpeg.cpp
40 int writeJpegFile(const char *filename, uint8_t *frame, int width, int height) { argument
74 uint16_t *src = (uint16_t *)(frame + row * width * 2);
mediafilter.cpp
176 DecodedFrame frame = *decodedFrameIndices->begin(); local
180 // not produce an output frame
184 size_t outIndex = frame.index;
232 timeUs, frame.flags);
243 timeUs, frame.flags);
295 // if so, the frame will also be skipped in output stream
595 DecodedFrame frame; local
597 &frame.index, &frame.offset, &frame
[all...]
stagefright.cpp
390 printf("decoded a total of %d frame(s).\n", n);
442 CHECK(!"sync frame detection not implemented yet for MPEG4");
445 CHECK(!"sync frame detection not implemented yet for H.263");
502 // Ignore everything up to the first IDR frame.
879 VideoFrame *frame = (VideoFrame *)mem->pointer(); local
882 (uint8_t *)frame + sizeof(VideoFrame),
883 frame->mWidth, frame->mHeight), 0);
/frameworks/av/media/libmediaplayerservice/
MetadataRetrieverClient.cpp
206 VideoFrame *frame = mRetriever->getFrameAtTime(timeUs, option); local
207 if (frame == NULL) {
208 ALOGE("failed to capture a video frame");
211 size_t size = sizeof(VideoFrame) + frame->mSize;
215 delete frame;
221 delete frame;
225 frameCopy->mWidth = frame->mWidth;
226 frameCopy->mHeight = frame->mHeight;
227 frameCopy->mDisplayWidth = frame->mDisplayWidth;
228 frameCopy->mDisplayHeight = frame
[all...]
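
The MetadataRetrieverClient hits above are the service side of getFrameAtTime. From application code the same capture is normally reached through the public MediaMetadataRetriever API; a minimal sketch follows, where the class name, file path and timestamp are placeholders rather than anything taken from the listing.

import android.graphics.Bitmap;
import android.media.MediaMetadataRetriever;

public final class ThumbnailGrabber {
    /**
     * Extracts a single video frame near the given presentation time.
     * OPTION_CLOSEST_SYNC asks for the nearest sync (IDR) frame, the cheap
     * path mirrored by the service-side code in the hits above.
     */
    public static Bitmap grab(String path, long timeUs) {
        MediaMetadataRetriever retriever = new MediaMetadataRetriever();
        try {
            retriever.setDataSource(path);
            return retriever.getFrameAtTime(timeUs, MediaMetadataRetriever.OPTION_CLOSEST_SYNC);
        } finally {
            try {
                retriever.release();
            } catch (Exception ignored) {
                // release() declares IOException on newer SDKs; nothing useful to do here.
            }
        }
    }
}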
/frameworks/av/media/libstagefright/
AudioSource.cpp
211 int16_t *frame = (int16_t *) data; local
218 frame[0] = (frame[0] * fixedMultiplier) >> kShift;
219 ++frame;
222 frame[0] = (frame[0] * fixedMultiplier) >> kShift;
223 frame[1] = (frame[1] * fixedMultiplier) >> kShift;
224 frame += 2;
351 if (mNumFramesReceived > 0) { // Ignore earlier frame los
[all...]
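
The AudioSource.cpp hits apply a software gain to 16-bit PCM samples in fixed point: each sample is multiplied by an integer multiplier and shifted back down by kShift. A Java sketch of the same technique follows; the Q14 format and the clamping step are assumptions for illustration, not the constants AudioSource actually uses.

public final class FixedPointGain {
    private static final int K_SHIFT = 14;                 // assumed Q14 gain format

    /** Scales each 16-bit sample in place by an integer fixed-point gain. */
    public static void apply(short[] frame, double gain) {
        int fixedMultiplier = (int) Math.round(gain * (1 << K_SHIFT));
        for (int i = 0; i < frame.length; i++) {
            // frame[i] = (frame[i] * fixedMultiplier) >> kShift, as in the hits above.
            int scaled = (frame[i] * fixedMultiplier) >> K_SHIFT;
            // Clamp to the 16-bit range to avoid wrap-around on overflow.
            frame[i] = (short) Math.max(Short.MIN_VALUE, Math.min(Short.MAX_VALUE, scaled));
        }
    }
}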
CameraSource.cpp
272 * @param width the video frame width in pixels
273 * @param height the video frame height in pixels
301 * Query the camera to retrieve the supported video frame sizes
308 * supported video frame sizes advertised by the camera.
341 * (width and height) and/or frame rate. If both width and
343 * if frameRate is -1, configuration on the frame rate
349 * @param width the target video frame width in pixels
350 * @param height the target video frame height in pixels
351 * @param frameRate the target frame rate in frames per second.
390 ALOGV("Supported frame rate
931 releaseRecordingFrame(const sp<IMemory>& frame) argument
1029 releaseOneRecordingFrame(const sp<IMemory>& frame) argument
1063 sp<IMemory> frame; local
[all...]
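
CameraSource does this configuration inside the media framework, but the query-then-configure flow its comments describe (check the frame sizes the camera advertises, then set size and frame rate) maps onto the public, now-deprecated android.hardware.Camera API. A hedged sketch, with every name invented except the Camera classes themselves:

import android.hardware.Camera;
import java.util.List;

public final class LegacyCameraConfig {
    /** Verifies the requested size is advertised by the camera, then applies size and frame rate. */
    public static void configure(Camera camera, int width, int height, int fps) {
        Camera.Parameters params = camera.getParameters();

        // Query the supported frame sizes advertised by the camera.
        List<Camera.Size> sizes = params.getSupportedPreviewSizes();
        boolean supported = false;
        for (Camera.Size s : sizes) {
            if (s.width == width && s.height == height) {
                supported = true;
                break;
            }
        }
        if (!supported) {
            throw new IllegalArgumentException("Unsupported size " + width + "x" + height);
        }

        params.setPreviewSize(width, height);
        params.setPreviewFrameRate(fps);   // deprecated, but illustrates the frame-rate half of the setup
        camera.setParameters(params);
    }
}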
FLACExtractor.cpp
169 const FLAC__Frame *frame, const FLAC__int32 * const buffer[]);
195 const FLAC__Frame *frame, const FLAC__int32 * const buffer[],
249 const FLAC__StreamDecoder * /* decoder */, const FLAC__Frame *frame,
252 return ((FLACParser *) client_data)->writeCallback(frame, buffer);
324 const FLAC__Frame *frame, const FLAC__int32 * const buffer[])
328 // FLAC parser doesn't free or realloc buffer until next frame or finish
329 mWriteHeader = frame->header;
248 write_callback( const FLAC__StreamDecoder * , const FLAC__Frame *frame, const FLAC__int32 * const buffer[], void *client_data) argument
323 writeCallback( const FLAC__Frame *frame, const FLAC__int32 * const buffer[]) argument
StagefrightMetadataRetriever.cpp
160 // input and output ports, if seeking to a sync frame. NOTE: This request may
305 // Only need to decode one IDR frame, unless we're seeking with CLOSEST
354 // from the extractor, decode to the specified frame. Otherwise we're done.
370 ALOGE("Failed to decode thumbnail frame");
376 ALOGV("successfully decoded video frame.");
405 VideoFrame *frame = new VideoFrame; local
406 frame->mWidth = crop_right - crop_left + 1;
407 frame->mHeight = crop_bottom - crop_top + 1;
408 frame->mDisplayWidth = frame
543 VideoFrame *frame = local
[all...]
/frameworks/av/media/libstagefright/codecs/amrnb/dec/src/
dtx_dec.cpp
204 // initialize decoder log frame energy
312 /* initialize decoder log frame energy */
580 Set_zero(lsfState->past_r_q, M); // reset for next speech frame
1073 Set_zero(lsfState->past_r_q, M); /* reset for next speech frame */
1396 frame =
1430 Word16 frame[])
1445 // compute log energy based on frame energy
1449 L_frame_en = L_mac(L_frame_en, frame[i], frame[i]);
1495 Word16 frame[],
1493 dtx_dec_activity_update(dtx_decState *st, Word16 lsf[], Word16 frame[], Flag *pOverflow) argument
[all...]
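
The dtx_dec.cpp hits compute a log frame energy by accumulating frame[i] * frame[i] with L_mac and then taking a logarithm of the total. A floating-point Java sketch of that computation follows; the AMR code itself works in Q-format fixed point, so the scaling and the base-2 log here are illustrative assumptions only.

public final class FrameEnergy {
    /** Returns an (unscaled) base-2 log of the frame's energy, i.e. the sum of squared samples. */
    public static double logEnergy(short[] frame) {
        double energy = 0.0;
        for (short s : frame) {
            energy += (double) s * s;   // mirrors L_frame_en = L_mac(L_frame_en, frame[i], frame[i])
        }
        // Guard against log(0) for an all-zero frame.
        return Math.log(Math.max(energy, 1.0)) / Math.log(2.0);
    }
}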
/frameworks/av/media/libstagefright/codecs/m4v_h263/enc/src/
mp4lib_int.h
46 Int frame; /* frame number */ member in struct:tagVOP
54 Int padded; /* flag whether this frame has been padded */
207 Int Refresh; /* Number of MBs refresh in each frame */
210 Bool FineFrameSkip_Enabled; /* src rate resolution frame skipping */
212 Bool NoFrameSkip_Enabled; /* do not allow frame skip */
229 Int maxFrameSize; /* maximum frame size(bits) for H263/Short header mode, k*16384 */
264 Int target_bits; /* target bits for current frame, = rc->T */
265 Int actual_bits; /* actual bits for current frame obtained after encoding, = rc->Rc*/
266 Int QP; /* quantization level for current frame,
[all...]
/frameworks/av/media/libstagefright/matroska/
MatroskaExtractor.cpp
499 // Accept the first key frame
532 MediaBuffer *frame = *mPendingFrames.begin(); local
535 frame->release();
536 frame = NULL;
565 * : Bytes 1..N of encrypted frame :
591 * : Bytes 1..N of unencrypted frame :
619 const mkvparser::Block::Frame &frame = block->GetFrame(i); local
620 size_t len = frame.len;
635 status_t err = frame.Read(mExtractor->mReader, data + trackInfo->mHeaderLen);
691 MediaBuffer *frame local
1065 const mkvparser::Block::Frame &frame = block->GetFrame(0); local
[all...]
/frameworks/av/media/libstagefright/mpeg2ts/
ESQueue.cpp
381 ALOGV("First ADTS AAC frame length is %zd bytes, "
831 // The final AAC frame may well extend into the next RangeInfo but
912 // tracking the frame positions first then decrypt only if an accessUnit to be generated
914 ADTSPosition frame = { local
920 frames.push(frame);
930 const ADTSPosition &frame = frames.itemAt(frameId); local
932 mSampleDecryptor->processAAC(frame.headerSize,
933 mBuffer->data() + frame.offset, frame.length);
935 // frameId, frame
[all...]
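
The ESQueue.cpp hits record each ADTS frame's offset, header size and length before handing the frames to the sample decryptor. A sketch of that position-tracking step over a plain ADTS byte stream follows; the class and field names are invented, and only the standard ADTS header layout (12-bit syncword, protection_absent bit, 13-bit frame_length field) is taken as given.

import java.util.ArrayList;
import java.util.List;

public final class AdtsScanner {
    public static final class FramePos {
        public final int offset, length, headerSize;
        FramePos(int offset, int length, int headerSize) {
            this.offset = offset; this.length = length; this.headerSize = headerSize;
        }
    }

    /** Walks the buffer and records the position of every complete ADTS frame. */
    public static List<FramePos> scan(byte[] data) {
        List<FramePos> frames = new ArrayList<>();
        int pos = 0;
        while (pos + 7 <= data.length) {
            // 12-bit syncword 0xFFF marks the start of an ADTS frame.
            if ((data[pos] & 0xFF) != 0xFF || (data[pos + 1] & 0xF0) != 0xF0) break;
            boolean crcPresent = (data[pos + 1] & 0x01) == 0;   // protection_absent == 0
            int headerSize = crcPresent ? 9 : 7;
            // frame_length covers the header plus the raw AAC payload.
            int frameLength = ((data[pos + 3] & 0x03) << 11)
                    | ((data[pos + 4] & 0xFF) << 3)
                    | ((data[pos + 5] & 0xE0) >> 5);
            if (frameLength < headerSize || pos + frameLength > data.length) break;
            frames.add(new FramePos(pos, frameLength, headerSize));
            pos += frameLength;
        }
        return frames;
    }
}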
/frameworks/av/services/camera/libcameraservice/api1/
CameraClient.cpp
491 // release a recording frame
969 // preview callback - frame buffer update
983 LOG2("frame callback is disabled");
1001 // Is the received frame copied out or not?
1003 LOG2("frame is copied");
1006 LOG2("frame is forwarded");
1129 ALOGE("%s: Failed to mmap heap for preview frame.", __FUNCTION__);
1133 ALOGE("%s: Failed to mmap preview buffer for preview frame.", __FUNCTION__);
1140 sp<MemoryBase> frame = new MemoryBase(previewBuffer, 0, size); local
1141 if (frame
[all...]
/frameworks/av/services/camera/libcameraservice/api1/client2/
FrameProcessor.cpp
65 bool FrameProcessor::processSingleFrame(CaptureResult &frame, argument
75 isPartialResult = frame.mResultExtras.partialResultCount < mNumPartialResults;
78 if (!isPartialResult && processFaceDetect(frame.mMetadata, client) != OK) {
83 process3aState(frame, client);
86 return FrameProcessorBase::processSingleFrame(frame, device);
89 status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
100 entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);
117 entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
119 // No faces this frame
134 entry = frame
234 process3aState(const CaptureResult &frame, const sp<Camera2Client> &client) argument
[all...]
ZslProcessor.cpp
143 // Initialize buffer queue and frame list based on pipeline max depth.
166 ALOGV("%s: Initialize buffer queue and frame list depth based on max pipeline depth (%zu)",
199 ALOGE("%s: metadata doesn't have frame number, skip this result", __FUNCTION__);
204 ALOGVV("Got preview metadata for frame %d with timestamp %" PRId64, frameNumber, timestamp);
538 // Verify that the frame is reasonable for reprocessing
543 ALOGE("%s: ZSL queue frame has no AE state field!",
549 ALOGV("%s: ZSL queue frame AE state is %d, need full capture",
583 // TODO: Shouldn't we also update the latest preview frame?
597 ALOGE("%s: Unable to update frame to a reprocess request",
773 if (!queueEntry.frame
820 const CameraMetadata &frame = mFrameList[j]; local
[all...]
ZslProcessor.h
122 CameraMetadata frame; member in struct:android::camera2::ZslProcessor::ZslPair
/frameworks/av/services/camera/libcameraservice/device1/
CameraHardwareInterface.cpp
620 hidl_handle frame = nh; local
621 mHidlDevice->releaseRecordingFrameHandle(heapId, bufferIndex, frame);
/frameworks/av/services/camera/libcameraservice/device3/
Camera3Device.cpp
20 //#define LOG_NNDEBUG 0 // Per-frame verbose logging
1647 ALOGW("%s: Camera %s: No frame in %" PRId64 " ns: %s (%d)",
1655 status_t Camera3Device::getNextResult(CaptureResult *frame) { argument
1663 if (frame == NULL) {
1669 frame->mResultExtras = result.mResultExtras;
1670 frame->mMetadata.acquire(result.mMetadata);
2377 ATRACE_ASYNC_END("frame capture", frameNumber);
2384 ") for frame %d doesn't match shutter timestamp (%" PRId64 ")",
2396 ALOGVV("%s: removed frame %d from InFlightMap", __FUNCTION__, frameNumber);
2421 SET_ERR("Failed to set frame numbe
[all...]
/frameworks/base/cmds/bootanimation/
BootAnimation.cpp
755 Animation::Frame frame; local
756 frame.name = leaf;
757 frame.map = map;
758 frame.trimWidth = animation.width;
759 frame.trimHeight = animation.height;
760 frame.trimX = 0;
761 frame.trimY = 0;
762 part.frames.add(frame);
788 Animation::Frame& frame(part.frames.editItemAt(frameIdx));
789 frame
[all...]
/frameworks/base/core/java/android/speech/tts/
TextToSpeechService.java
671 public void dispatchOnRangeStart(int start, int end, int frame); argument
892 public void dispatchOnRangeStart(int start, int end, int frame) { argument
896 getCallerIdentity(), utteranceId, start, end, frame);
1578 Object callerIdentity, String utteranceId, int start, int end, int frame) {
1582 cb.onRangeStart(utteranceId, start, end, frame);
1577 dispatchOnRangeStart( Object callerIdentity, String utteranceId, int start, int end, int frame) argument
UtteranceProgressListener.java
141 * @param frame The position in frames in the audio of the request where this range is spoken.
143 public void onRangeStart(String utteranceId, int start, int end, int frame) { argument
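
The @param text above documents the frame argument of UtteranceProgressListener.onRangeStart. An application receives these callbacks by registering a listener on a TextToSpeech instance (API 26+); a minimal sketch, with the class name and log tag invented:

import android.speech.tts.TextToSpeech;
import android.speech.tts.UtteranceProgressListener;
import android.util.Log;

public final class RangeLogger {
    /** Attaches a listener that logs each spoken text range; "tts" must already be initialized. */
    public static void attach(TextToSpeech tts) {
        tts.setOnUtteranceProgressListener(new UtteranceProgressListener() {
            @Override public void onStart(String utteranceId) { }
            @Override public void onDone(String utteranceId) { }
            @Override public void onError(String utteranceId) { }

            @Override
            public void onRangeStart(String utteranceId, int start, int end, int frame) {
                // "frame" is the position, in audio frames, where this character range is spoken.
                Log.d("RangeLogger", utteranceId + ": chars [" + start + ", " + end
                        + ") at audio frame " + frame);
            }
        });
    }
}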
/frameworks/base/core/java/android/view/
Choreographer.java
40 * the next display frame.
48 * display frame rendering, use {@link android.animation.ValueAnimator#start}.</li>
50 * frame, use {@link View#postOnAnimation}.</li>
52 * frame after a delay, use {@link View#postOnAnimationDelayed}.</li>
54 * next display frame, use {@link View#postInvalidateOnAnimation()} or
57 * sync with display frame rendering, do nothing. This already happens automatically.
83 // Prints debug messages about every frame and callback registered (high volume).
89 // default value be less than the true inter-frame delay on all devices to avoid
129 // Enable/disable using the frame time instead of returning now.
142 // All frame callback
600 doFrame(long frameTimeNanos, int frame) argument
849 onVsync(long timestampNanos, int builtInDisplayId, int frame) argument
[all...]
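
The Choreographer.java comments above point most callers at higher-level helpers (ValueAnimator, View#postOnAnimation and friends); when per-frame work really is needed, the public entry point is postFrameCallback. A minimal sketch of that pattern, with the class name FramePulse invented for illustration:

import android.view.Choreographer;

public final class FramePulse implements Choreographer.FrameCallback {
    private boolean running;

    /** Starts receiving one doFrame callback per display frame (call on a Looper thread). */
    public void start() {
        running = true;
        Choreographer.getInstance().postFrameCallback(this);
    }

    public void stop() {
        running = false;
        Choreographer.getInstance().removeFrameCallback(this);
    }

    @Override
    public void doFrame(long frameTimeNanos) {
        // frameTimeNanos is the vsync timestamp for this frame; do per-frame work here,
        // then re-post to keep receiving callbacks.
        if (running) {
            Choreographer.getInstance().postFrameCallback(this);
        }
    }
}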
DisplayEventReceiver.java
49 * Surface Flinger is processing a frame.
131 * The recipient should render a frame and then call {@link #scheduleVsync}
138 * @param frame The frame number. Increases by one for each vertical sync interval.
140 public void onVsync(long timestampNanos, int builtInDisplayId, int frame) { argument
157 * display frame begins.
170 private void dispatchVsync(long timestampNanos, int builtInDisplayId, int frame) { argument
171 onVsync(timestampNanos, builtInDisplayId, frame);
SurfaceControl.java
102 IBinder handle, long frame);
104 long surfaceObject, long frame);
430 public void deferTransactionUntil(IBinder handle, long frame) { argument
431 if (frame > 0) {
432 nativeDeferTransactionUntil(mNativeObject, handle, frame);
436 public void deferTransactionUntil(Surface barrier, long frame) { argument
437 if (frame > 0) {
438 nativeDeferTransactionUntilSurface(mNativeObject, barrier.mNativeObject, frame);
101 nativeDeferTransactionUntil(long nativeObject, IBinder handle, long frame) argument
103 nativeDeferTransactionUntilSurface(long nativeObject, long surfaceObject, long frame) argument
SurfaceView.java
109 // We use this to track if the application has produced a frame
714 + ", frame=" + mSurfaceFrame);
803 // its 2nd frame if RenderThread is running slowly could potentially see
809 // a single-frame desync is therefore preferable for now.
1205 public void deferTransactionUntil(IBinder handle, long frame) { argument
1206 super.deferTransactionUntil(handle, frame);
1207 mBackgroundControl.deferTransactionUntil(handle, frame);
1211 public void deferTransactionUntil(Surface barrier, long frame) { argument
1212 super.deferTransactionUntil(barrier, frame);
1213 mBackgroundControl.deferTransactionUntil(barrier, frame);
[all...]

Completed in 6952 milliseconds
