screenrecord.cpp revision e32106fd5175afdf939ae397aece9caf378a4912
1/*
2 * Copyright 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "ScreenRecord"
18#define ATRACE_TAG ATRACE_TAG_GRAPHICS
19//#define LOG_NDEBUG 0
20#include <utils/Log.h>
21
22#include <binder/IPCThreadState.h>
23#include <utils/Errors.h>
24#include <utils/Timers.h>
25#include <utils/Trace.h>
26
27#include <gui/Surface.h>
28#include <gui/SurfaceComposerClient.h>
29#include <gui/ISurfaceComposer.h>
30#include <ui/DisplayInfo.h>
31#include <media/openmax/OMX_IVCommon.h>
32#include <media/stagefright/foundation/ABuffer.h>
33#include <media/stagefright/foundation/AMessage.h>
34#include <media/stagefright/MediaCodec.h>
35#include <media/stagefright/MediaErrors.h>
36#include <media/stagefright/MediaMuxer.h>
37#include <media/ICrypto.h>
38
39#include <stdlib.h>
40#include <unistd.h>
41#include <string.h>
42#include <stdio.h>
43#include <ctype.h>
44#include <fcntl.h>
45#include <signal.h>
46#include <getopt.h>
47#include <sys/wait.h>
48#include <termios.h>
49#include <assert.h>
50
51#include "screenrecord.h"
52#include "Overlay.h"
53#include "FrameOutput.h"
54
55using namespace android;
56
// Encoder/recording limits and fallback dimensions.
static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
static const uint32_t kFallbackWidth = 1280;        // 720p
static const uint32_t kFallbackHeight = 720;
static const char* kMimeTypeAvc = "video/avc";

// Command-line parameters.
static bool gVerbose = false;           // chatty on stdout
static bool gRotate = false;            // rotate 90 degrees
static enum {
    FORMAT_MP4, FORMAT_H264, FORMAT_FRAMES, FORMAT_RAW_FRAMES
} gOutputFormat = FORMAT_MP4;           // data format for output
static bool gSizeSpecified = false;     // was size explicitly requested?
static bool gWantInfoScreen = false;    // do we want initial info screen?
static bool gWantFrameTime = false;     // do we want times on each frame?
static uint32_t gVideoWidth = 0;        // default width+height
static uint32_t gVideoHeight = 0;       // 0 means "derive from display size"
static uint32_t gBitRate = 4000000;     // 4Mbps
static uint32_t gTimeLimitSec = kMaxTimeLimitSec;

// Set by signal handler to stop recording.
// NOTE(review): volatile sig_atomic_t is the portable type for a flag
// written from a signal handler; volatile bool appears to suffice here.
static volatile bool gStopRequested;

// Previous signal handler state, restored after first hit.
static struct sigaction gOrigSigactionINT;
static struct sigaction gOrigSigactionHUP;
84
85
86/*
87 * Catch keyboard interrupt signals.  On receipt, the "stop requested"
88 * flag is raised, and the original handler is restored (so that, if
89 * we get stuck finishing, a second Ctrl-C will kill the process).
90 */
91static void signalCatcher(int signum)
92{
93    gStopRequested = true;
94    switch (signum) {
95    case SIGINT:
96    case SIGHUP:
97        sigaction(SIGINT, &gOrigSigactionINT, NULL);
98        sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
99        break;
100    default:
101        abort();
102        break;
103    }
104}
105
106/*
107 * Configures signal handlers.  The previous handlers are saved.
108 *
109 * If the command is run from an interactive adb shell, we get SIGINT
110 * when Ctrl-C is hit.  If we're run from the host, the local adb process
111 * gets the signal, and we get a SIGHUP when the terminal disconnects.
112 */
113static status_t configureSignals() {
114    struct sigaction act;
115    memset(&act, 0, sizeof(act));
116    act.sa_handler = signalCatcher;
117    if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
118        status_t err = -errno;
119        fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
120                strerror(errno));
121        return err;
122    }
123    if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
124        status_t err = -errno;
125        fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
126                strerror(errno));
127        return err;
128    }
129    return NO_ERROR;
130}
131
132/*
133 * Returns "true" if the device is rotated 90 degrees.
134 */
135static bool isDeviceRotated(int orientation) {
136    return orientation != DISPLAY_ORIENTATION_0 &&
137            orientation != DISPLAY_ORIENTATION_180;
138}
139
140/*
141 * Configures and starts the MediaCodec encoder.  Obtains an input surface
142 * from the codec.
143 */
144static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
145        sp<IGraphicBufferProducer>* pBufferProducer) {
146    status_t err;
147
148    if (gVerbose) {
149        printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
150                gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
151    }
152
153    sp<AMessage> format = new AMessage;
154    format->setInt32("width", gVideoWidth);
155    format->setInt32("height", gVideoHeight);
156    format->setString("mime", kMimeTypeAvc);
157    format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
158    format->setInt32("bitrate", gBitRate);
159    format->setFloat("frame-rate", displayFps);
160    format->setInt32("i-frame-interval", 10);
161
162    sp<ALooper> looper = new ALooper;
163    looper->setName("screenrecord_looper");
164    looper->start();
165    ALOGV("Creating codec");
166    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
167    if (codec == NULL) {
168        fprintf(stderr, "ERROR: unable to create %s codec instance\n",
169                kMimeTypeAvc);
170        return UNKNOWN_ERROR;
171    }
172
173    err = codec->configure(format, NULL, NULL,
174            MediaCodec::CONFIGURE_FLAG_ENCODE);
175    if (err != NO_ERROR) {
176        fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
177                kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
178        codec->release();
179        return err;
180    }
181
182    ALOGV("Creating encoder input surface");
183    sp<IGraphicBufferProducer> bufferProducer;
184    err = codec->createInputSurface(&bufferProducer);
185    if (err != NO_ERROR) {
186        fprintf(stderr,
187            "ERROR: unable to create encoder input surface (err=%d)\n", err);
188        codec->release();
189        return err;
190    }
191
192    ALOGV("Starting codec");
193    err = codec->start();
194    if (err != NO_ERROR) {
195        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
196        codec->release();
197        return err;
198    }
199
200    ALOGV("Codec prepared");
201    *pCodec = codec;
202    *pBufferProducer = bufferProducer;
203    return 0;
204}
205
206/*
207 * Sets the display projection, based on the display dimensions, video size,
208 * and device orientation.
209 */
210static status_t setDisplayProjection(const sp<IBinder>& dpy,
211        const DisplayInfo& mainDpyInfo) {
212    status_t err;
213
214    // Set the region of the layer stack we're interested in, which in our
215    // case is "all of it".  If the app is rotated (so that the width of the
216    // app is based on the height of the display), reverse width/height.
217    bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
218    uint32_t sourceWidth, sourceHeight;
219    if (!deviceRotated) {
220        sourceWidth = mainDpyInfo.w;
221        sourceHeight = mainDpyInfo.h;
222    } else {
223        ALOGV("using rotated width/height");
224        sourceHeight = mainDpyInfo.w;
225        sourceWidth = mainDpyInfo.h;
226    }
227    Rect layerStackRect(sourceWidth, sourceHeight);
228
229    // We need to preserve the aspect ratio of the display.
230    float displayAspect = (float) sourceHeight / (float) sourceWidth;
231
232
233    // Set the way we map the output onto the display surface (which will
234    // be e.g. 1280x720 for a 720p video).  The rect is interpreted
235    // post-rotation, so if the display is rotated 90 degrees we need to
236    // "pre-rotate" it by flipping width/height, so that the orientation
237    // adjustment changes it back.
238    //
239    // We might want to encode a portrait display as landscape to use more
240    // of the screen real estate.  (If players respect a 90-degree rotation
241    // hint, we can essentially get a 720x1280 video instead of 1280x720.)
242    // In that case, we swap the configured video width/height and then
243    // supply a rotation value to the display projection.
244    uint32_t videoWidth, videoHeight;
245    uint32_t outWidth, outHeight;
246    if (!gRotate) {
247        videoWidth = gVideoWidth;
248        videoHeight = gVideoHeight;
249    } else {
250        videoWidth = gVideoHeight;
251        videoHeight = gVideoWidth;
252    }
253    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
254        // limited by narrow width; reduce height
255        outWidth = videoWidth;
256        outHeight = (uint32_t)(videoWidth * displayAspect);
257    } else {
258        // limited by short height; restrict width
259        outHeight = videoHeight;
260        outWidth = (uint32_t)(videoHeight / displayAspect);
261    }
262    uint32_t offX, offY;
263    offX = (videoWidth - outWidth) / 2;
264    offY = (videoHeight - outHeight) / 2;
265    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
266
267    if (gVerbose) {
268        if (gRotate) {
269            printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
270                    outHeight, outWidth, offY, offX);
271        } else {
272            printf("Content area is %ux%u at offset x=%d y=%d\n",
273                    outWidth, outHeight, offX, offY);
274        }
275    }
276
277    SurfaceComposerClient::setDisplayProjection(dpy,
278            gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
279            layerStackRect, displayRect);
280    return NO_ERROR;
281}
282
/*
 * Configures the virtual display.  When this completes, virtual display
 * frames will start arriving from the buffer producer.
 *
 * On return, *pDisplayHandle holds the virtual display token; the caller
 * is responsible for destroyDisplay() when recording ends.
 */
static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
        const sp<IGraphicBufferProducer>& bufferProducer,
        sp<IBinder>* pDisplayHandle) {
    // Create a non-secure virtual display that renders into the supplied
    // producer (the encoder's input surface, or the overlay's).
    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
            String8("ScreenRecorder"), false /*secure*/);

    // Apply surface, projection, and layer stack in one atomic
    // SurfaceFlinger transaction.
    SurfaceComposerClient::openGlobalTransaction();
    SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
    setDisplayProjection(dpy, mainDpyInfo);  // currently always returns NO_ERROR
    SurfaceComposerClient::setDisplayLayerStack(dpy, 0);    // default stack
    SurfaceComposerClient::closeGlobalTransaction();

    *pDisplayHandle = dpy;

    return NO_ERROR;
}
303
/*
 * Runs the MediaCodec encoder, sending the output to the MediaMuxer.  The
 * input frames are coming from the virtual display as fast as SurfaceFlinger
 * wants to send them.
 *
 * Exactly one of muxer or rawFp must be non-null.
 *
 * The muxer must *not* have been started before calling: we add the track
 * (with CSD) and start it ourselves when the encoder reports a format change.
 *
 * Returns NO_ERROR on a normal stop (signal, EOS, or time limit reached),
 * or the first codec/muxer error encountered.
 */
static status_t runEncoder(const sp<MediaCodec>& encoder,
        const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
        const sp<IBinder>& virtualDpy, uint8_t orientation) {
    static int kTimeout = 250000;   // be responsive on signal
    status_t err;
    ssize_t trackIdx = -1;          // muxer track; set on INFO_FORMAT_CHANGED
    uint32_t debugNumFrames = 0;
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
    DisplayInfo mainDpyInfo;

    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));

    Vector<sp<ABuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    // This is set by the signal handler.
    gStopRequested = false;

    // Run until we're signaled.
    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        // Enforce the recording time limit.
        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            if (gVerbose) {
                printf("Time limit reached\n");
            }
            break;
        }

        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
        switch (err) {
        case NO_ERROR:
            // got a buffer
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                // NOTE(review): %u with a size_t argument -- %zu would be
                // correct; only compiled in when LOG_NDEBUG=0.
                ALOGV("Got codec config buffer (%u bytes)", size);
                if (muxer != NULL) {
                    // ignore this -- we passed the CSD into MediaMuxer when
                    // we got the format change notification
                    size = 0;
                }
            }
            if (size != 0) {
                ALOGV("Got data in buffer %d, size=%d, pts=%lld",
                        bufIndex, size, ptsUsec);

                { // scope
                    ATRACE_NAME("orientation");
                    // Check orientation, update if it has changed.
                    //
                    // Polling for changes is inefficient and wrong, but the
                    // useful stuff is hard to get at without a Dalvik VM.
                    err = SurfaceComposerClient::getDisplayInfo(mainDpy,
                            &mainDpyInfo);
                    if (err != NO_ERROR) {
                        ALOGW("getDisplayInfo(main) failed: %d", err);
                    } else if (orientation != mainDpyInfo.orientation) {
                        ALOGD("orientation changed, now %d", mainDpyInfo.orientation);
                        SurfaceComposerClient::openGlobalTransaction();
                        setDisplayProjection(virtualDpy, mainDpyInfo);
                        SurfaceComposerClient::closeGlobalTransaction();
                        orientation = mainDpyInfo.orientation;
                    }
                }

                // If the virtual display isn't providing us with timestamps,
                // use the current time.  This isn't great -- we could get
                // decoded data in clusters -- but we're not expecting
                // to hit this anyway.
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }

                if (muxer == NULL) {
                    // Raw H.264 output: write the access unit straight out.
                    fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
                    // Flush the data immediately in case we're streaming.
                    // We don't want to do this if all we've written is
                    // the SPS/PPS data because mplayer gets confused.
                    if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
                        fflush(rawFp);
                    }
                } else {
                    // The MediaMuxer docs are unclear, but it appears that we
                    // need to pass either the full set of BufferInfo flags, or
                    // (flags & BUFFER_FLAG_SYNCFRAME).
                    //
                    // If this blocks for too long we could drop frames.  We may
                    // want to queue these up and do them on a different thread.
                    ATRACE_NAME("write sample");
                    assert(trackIdx != -1);
                    err = muxer->writeSampleData(buffers[bufIndex], trackIdx,
                            ptsUsec, flags);
                    if (err != NO_ERROR) {
                        fprintf(stderr,
                            "Failed writing data to muxer (err=%d)\n", err);
                        return err;
                    }
                }
                debugNumFrames++;
            }
            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                        err);
                return err;
            }
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                // Not expecting EOS from SurfaceFlinger.  Go with it.
                ALOGI("Received end-of-stream");
                gStopRequested = true;
            }
            break;
        case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
            // No output ready within kTimeout; loop and re-check the limits.
            ALOGV("Got -EAGAIN, looping");
            break;
        case INFO_FORMAT_CHANGED:           // INFO_OUTPUT_FORMAT_CHANGED
            {
                // Format includes CSD, which we must provide to muxer.
                ALOGV("Encoder format changed");
                sp<AMessage> newFormat;
                encoder->getOutputFormat(&newFormat);
                if (muxer != NULL) {
                    trackIdx = muxer->addTrack(newFormat);
                    ALOGV("Starting muxer");
                    err = muxer->start();
                    if (err != NO_ERROR) {
                        fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
                        return err;
                    }
                }
            }
            break;
        case INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED
            // Not expected for an encoder; handle it anyway.
            ALOGV("Encoder buffers changed");
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                fprintf(stderr,
                        "Unable to get new output buffers (err=%d)\n", err);
                return err;
            }
            break;
        case INVALID_OPERATION:
            ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
            return err;
        default:
            fprintf(stderr,
                    "Got weird result %d from dequeueOutputBuffer\n", err);
            return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", gStopRequested);
    if (gVerbose) {
        // NOTE(review): nanoseconds_to_seconds() returns nsecs_t (int64_t);
        // %lld assumes long long -- confirm this matches on LP64 builds.
        printf("Encoder stopping; recorded %u frames in %lld seconds\n",
                debugNumFrames, nanoseconds_to_seconds(
                        systemTime(CLOCK_MONOTONIC) - startWhenNsec));
    }
    return NO_ERROR;
}
482
483/*
484 * Raw H.264 byte stream output requested.  Send the output to stdout
485 * if desired.  If the output is a tty, reconfigure it to avoid the
486 * CRLF line termination that we see with "adb shell" commands.
487 */
488static FILE* prepareRawOutput(const char* fileName) {
489    FILE* rawFp = NULL;
490
491    if (strcmp(fileName, "-") == 0) {
492        if (gVerbose) {
493            fprintf(stderr, "ERROR: verbose output and '-' not compatible");
494            return NULL;
495        }
496        rawFp = stdout;
497    } else {
498        rawFp = fopen(fileName, "w");
499        if (rawFp == NULL) {
500            fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
501            return NULL;
502        }
503    }
504
505    int fd = fileno(rawFp);
506    if (isatty(fd)) {
507        // best effort -- reconfigure tty for "raw"
508        ALOGD("raw video output to tty (fd=%d)", fd);
509        struct termios term;
510        if (tcgetattr(fd, &term) == 0) {
511            cfmakeraw(&term);
512            if (tcsetattr(fd, TCSANOW, &term) == 0) {
513                ALOGD("tty successfully configured for raw");
514            }
515        }
516    }
517
518    return rawFp;
519}
520
/*
 * Main "do work" start point.
 *
 * Configures codec, muxer, and virtual display, then starts moving bits
 * around.
 *
 * Returns NO_ERROR on success; on error paths the encoder (if any) is
 * release()d before returning -- see the comment below about stagefright.
 */
static status_t recordScreen(const char* fileName) {
    status_t err;

    // Configure signal handler.
    err = configureSignals();
    if (err != NO_ERROR) return err;

    // Start Binder thread pool.  MediaCodec needs to be able to receive
    // messages from mediaserver.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    // Get main display parameters.
    sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
            ISurfaceComposer::eDisplayIdMain);
    DisplayInfo mainDpyInfo;
    err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display characteristics\n");
        return err;
    }
    if (gVerbose) {
        printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
                mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps,
                mainDpyInfo.orientation);
    }

    // If the user didn't specify a size, default to the display size
    // (swapped if the device is currently rotated 90/270 degrees).
    bool rotated = isDeviceRotated(mainDpyInfo.orientation);
    if (gVideoWidth == 0) {
        gVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
    }
    if (gVideoHeight == 0) {
        gVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
    }

    // Configure and start the encoder.
    sp<MediaCodec> encoder;
    sp<FrameOutput> frameOutput;
    sp<IGraphicBufferProducer> encoderInputSurface;
    if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
        err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);

        if (err != NO_ERROR && !gSizeSpecified) {
            // fallback is defined for landscape; swap if we're in portrait
            bool needSwap = gVideoWidth < gVideoHeight;
            uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
            uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
            if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
                ALOGV("Retrying with 720p");
                fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                        gVideoWidth, gVideoHeight, newWidth, newHeight);
                gVideoWidth = newWidth;
                gVideoHeight = newHeight;
                err = prepareEncoder(mainDpyInfo.fps, &encoder,
                        &encoderInputSurface);
            }
        }
        if (err != NO_ERROR) return err;

        // From here on, we must explicitly release() the encoder before it goes
        // out of scope, or we will get an assertion failure from stagefright
        // later on in a different thread.
    } else {
        // We're not using an encoder at all.  The "encoder input surface" we hand to
        // SurfaceFlinger will just feed directly to us.
        frameOutput = new FrameOutput();
        err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
        if (err != NO_ERROR) {
            return err;
        }
    }

    // Draw the "info" page by rendering a frame with GLES and sending
    // it directly to the encoder.
    // TODO: consider displaying this as a regular layer to avoid b/11697754
    if (gWantInfoScreen) {
        Overlay::drawInfoPage(encoderInputSurface);
    }

    // Configure optional overlay.
    sp<IGraphicBufferProducer> bufferProducer;
    sp<Overlay> overlay;
    if (gWantFrameTime) {
        // Send virtual display frames to an external texture.
        overlay = new Overlay();
        err = overlay->start(encoderInputSurface, &bufferProducer);
        if (err != NO_ERROR) {
            if (encoder != NULL) encoder->release();
            return err;
        }
        if (gVerbose) {
            printf("Bugreport overlay created\n");
        }
    } else {
        // Use the encoder's input surface as the virtual display surface.
        bufferProducer = encoderInputSurface;
    }

    // Configure virtual display.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
    if (err != NO_ERROR) {
        if (encoder != NULL) encoder->release();
        return err;
    }

    // Set up the output sink: MP4 goes through MediaMuxer, everything else
    // goes to a raw FILE* stream.
    sp<MediaMuxer> muxer = NULL;
    FILE* rawFp = NULL;
    switch (gOutputFormat) {
        case FORMAT_MP4: {
            // Configure muxer.  We have to wait for the CSD blob from the encoder
            // before we can start it.
            muxer = new MediaMuxer(fileName, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
            if (gRotate) {
                muxer->setOrientationHint(90);  // TODO: does this do anything?
            }
            break;
        }
        case FORMAT_H264:
        case FORMAT_FRAMES:
        case FORMAT_RAW_FRAMES: {
            rawFp = prepareRawOutput(fileName);
            if (rawFp == NULL) {
                if (encoder != NULL) encoder->release();
                return -1;
            }
            break;
        }
        default:
            fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
            abort();
    }

    if (gOutputFormat == FORMAT_FRAMES || gOutputFormat == FORMAT_RAW_FRAMES) {
        // TODO: if we want to make this a proper feature, we should output
        //       an outer header with version info.  Right now we never change
        //       the frame size or format, so we could conceivably just send
        //       the current frame header once and then follow it with an
        //       unbroken stream of data.

        // Make the EGL context current again.  This gets unhooked if we're
        // using "--bugreport" mode.
        // TODO: figure out if we can eliminate this
        frameOutput->prepareToCopy();

        while (!gStopRequested) {
            // Poll for frames, the same way we do for MediaCodec.  We do
            // all of the work on the main thread.
            //
            // Ideally we'd sleep indefinitely and wake when the
            // stop was requested, but this will do for now.  (It almost
            // works because wait() wakes when a signal hits, but we
            // need to handle the edge cases.)
            bool rawFrames = gOutputFormat == FORMAT_RAW_FRAMES;
            err = frameOutput->copyFrame(rawFp, 250000, rawFrames);
            if (err == ETIMEDOUT) {
                err = NO_ERROR;
            } else if (err != NO_ERROR) {
                ALOGE("Got error %d from copyFrame()", err);
                break;
            }
        }
    } else {
        // Main encoder loop.
        err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy,
                mainDpyInfo.orientation);
        if (err != NO_ERROR) {
            fprintf(stderr, "Encoder failed (err=%d)\n", err);
            // fall through to cleanup
        }

        if (gVerbose) {
            printf("Stopping encoder and muxer\n");
        }
    }

    // Shut everything down, starting with the producer side.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    if (overlay != NULL) overlay->stop();
    if (encoder != NULL) encoder->stop();
    if (muxer != NULL) {
        // If we don't stop muxer explicitly, i.e. let the destructor run,
        // it may hang (b/11050628).
        muxer->stop();
    } else if (rawFp != stdout) {
        fclose(rawFp);
    }
    if (encoder != NULL) encoder->release();

    return err;
}
719
/*
 * Sends a broadcast to the media scanner to tell it about the new video.
 *
 * This is optional, but nice to have.  Forks and execs "am broadcast";
 * the parent waits for the child so the verbose output interleaves sanely
 * and failures can be logged.
 */
static status_t notifyMediaScanner(const char* fileName) {
    // need to do allocations before the fork()
    String8 fileUrl("file://");
    fileUrl.append(fileName);

    const char* kCommand = "/system/bin/am";
    const char* const argv[] = {
            kCommand,
            "broadcast",
            "-a",
            "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
            "-d",
            fileUrl.string(),
            NULL
    };
    if (gVerbose) {
        printf("Executing:");
        for (int i = 0; argv[i] != NULL; i++) {
            printf(" %s", argv[i]);
        }
        putchar('\n');
    }

    pid_t pid = fork();
    if (pid < 0) {
        int err = errno;
        ALOGW("fork() failed: %s", strerror(err));
        return -err;
    } else if (pid > 0) {
        // parent; wait for the child, mostly to make the verbose-mode output
        // look right, but also to check for and log failures
        int status;
        pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
        if (actualPid != pid) {
            ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
        } else if (status != 0) {
            // NOTE(review): this is the raw waitpid status word, not
            // WEXITSTATUS(status); the != 0 check still detects failure.
            ALOGW("'am broadcast' exited with status=%d", status);
        } else {
            ALOGV("'am broadcast' exited successfully");
        }
    } else {
        // child process
        if (!gVerbose) {
            // non-verbose, suppress 'am' output
            ALOGV("closing stdout/stderr in child");
            int fd = open("/dev/null", O_WRONLY);
            if (fd >= 0) {
                dup2(fd, STDOUT_FILENO);
                dup2(fd, STDERR_FILENO);
                close(fd);
            }
        }
        execv(kCommand, const_cast<char* const*>(argv));
        // only reached if exec itself fails
        ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
        exit(1);
    }
    return NO_ERROR;
}
782
/*
 * Parses a string of the form "1280x720".
 *
 * Returns true on success, false if the string is malformed or either
 * dimension is negative or won't fit in a uint32_t.
 */
static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
        uint32_t* pHeight) {
    long width, height;
    char* end;

    // Must specify base 10, or "0x0" gets parsed differently.
    width = strtol(widthHeight, &end, 10);
    if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
        // invalid chars in width, or missing 'x', or missing height
        return false;
    }
    height = strtol(end + 1, &end, 10);
    if (*end != '\0') {
        // invalid chars in height
        return false;
    }
    if (width < 0 || height < 0 ||
            (unsigned long) width > 0xffffffffUL ||
            (unsigned long) height > 0xffffffffUL) {
        // negative values would wrap when stored in the uint32_t out-params
        return false;
    }

    *pWidth = width;
    *pHeight = height;
    return true;
}
809
810/*
811 * Accepts a string with a bare number ("4000000") or with a single-character
812 * unit ("4m").
813 *
814 * Returns an error if parsing fails.
815 */
816static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
817    long value;
818    char* endptr;
819
820    value = strtol(str, &endptr, 10);
821    if (*endptr == '\0') {
822        // bare number
823        *pValue = value;
824        return NO_ERROR;
825    } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
826        *pValue = value * 1000000;  // check for overflow?
827        return NO_ERROR;
828    } else {
829        fprintf(stderr, "Unrecognized value: %s\n", str);
830        return UNKNOWN_ERROR;
831    }
832}
833
834/*
835 * Dumps usage on stderr.
836 */
837static void usage() {
838    fprintf(stderr,
839        "Usage: screenrecord [options] <filename>\n"
840        "\n"
841        "Android screenrecord v%d.%d.  Records the device's display to a .mp4 file.\n"
842        "\n"
843        "Options:\n"
844        "--size WIDTHxHEIGHT\n"
845        "    Set the video size, e.g. \"1280x720\".  Default is the device's main\n"
846        "    display resolution (if supported), 1280x720 if not.  For best results,\n"
847        "    use a size supported by the AVC encoder.\n"
848        "--bit-rate RATE\n"
849        "    Set the video bit rate, in bits per second.  Value may be specified as\n"
850        "    bits or megabits, e.g. '4000000' is equivalent to '4M'.  Default %dMbps.\n"
851        "--bugreport\n"
852        "    Add additional information, such as a timestamp overlay, that is helpful\n"
853        "    in videos captured to illustrate bugs.\n"
854        "--time-limit TIME\n"
855        "    Set the maximum recording time, in seconds.  Default / maximum is %d.\n"
856        "--verbose\n"
857        "    Display interesting information on stdout.\n"
858        "--help\n"
859        "    Show this message.\n"
860        "\n"
861        "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
862        "\n",
863        kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
864        );
865}
866
867/*
868 * Parses args and kicks things off.
869 */
870int main(int argc, char* const argv[]) {
871    static const struct option longOptions[] = {
872        { "help",               no_argument,        NULL, 'h' },
873        { "verbose",            no_argument,        NULL, 'v' },
874        { "size",               required_argument,  NULL, 's' },
875        { "bit-rate",           required_argument,  NULL, 'b' },
876        { "time-limit",         required_argument,  NULL, 't' },
877        { "bugreport",          no_argument,        NULL, 'u' },
878        // "unofficial" options
879        { "show-device-info",   no_argument,        NULL, 'i' },
880        { "show-frame-time",    no_argument,        NULL, 'f' },
881        { "rotate",             no_argument,        NULL, 'r' },
882        { "output-format",      required_argument,  NULL, 'o' },
883        { NULL,                 0,                  NULL, 0 }
884    };
885
886    while (true) {
887        int optionIndex = 0;
888        int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
889        if (ic == -1) {
890            break;
891        }
892
893        switch (ic) {
894        case 'h':
895            usage();
896            return 0;
897        case 'v':
898            gVerbose = true;
899            break;
900        case 's':
901            if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
902                fprintf(stderr, "Invalid size '%s', must be width x height\n",
903                        optarg);
904                return 2;
905            }
906            if (gVideoWidth == 0 || gVideoHeight == 0) {
907                fprintf(stderr,
908                    "Invalid size %ux%u, width and height may not be zero\n",
909                    gVideoWidth, gVideoHeight);
910                return 2;
911            }
912            gSizeSpecified = true;
913            break;
914        case 'b':
915            if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
916                return 2;
917            }
918            if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
919                fprintf(stderr,
920                        "Bit rate %dbps outside acceptable range [%d,%d]\n",
921                        gBitRate, kMinBitRate, kMaxBitRate);
922                return 2;
923            }
924            break;
925        case 't':
926            gTimeLimitSec = atoi(optarg);
927            if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
928                fprintf(stderr,
929                        "Time limit %ds outside acceptable range [1,%d]\n",
930                        gTimeLimitSec, kMaxTimeLimitSec);
931                return 2;
932            }
933            break;
934        case 'u':
935            gWantInfoScreen = true;
936            gWantFrameTime = true;
937            break;
938        case 'i':
939            gWantInfoScreen = true;
940            break;
941        case 'f':
942            gWantFrameTime = true;
943            break;
944        case 'r':
945            // experimental feature
946            gRotate = true;
947            break;
948        case 'o':
949            if (strcmp(optarg, "mp4") == 0) {
950                gOutputFormat = FORMAT_MP4;
951            } else if (strcmp(optarg, "h264") == 0) {
952                gOutputFormat = FORMAT_H264;
953            } else if (strcmp(optarg, "frames") == 0) {
954                gOutputFormat = FORMAT_FRAMES;
955            } else if (strcmp(optarg, "raw-frames") == 0) {
956                gOutputFormat = FORMAT_RAW_FRAMES;
957            } else {
958                fprintf(stderr, "Unknown format '%s'\n", optarg);
959                return 2;
960            }
961            break;
962        default:
963            if (ic != '?') {
964                fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
965            }
966            return 2;
967        }
968    }
969
970    if (optind != argc - 1) {
971        fprintf(stderr, "Must specify output file (see --help).\n");
972        return 2;
973    }
974
975    const char* fileName = argv[optind];
976    if (gOutputFormat == FORMAT_MP4) {
977        // MediaMuxer tries to create the file in the constructor, but we don't
978        // learn about the failure until muxer.start(), which returns a generic
979        // error code without logging anything.  We attempt to create the file
980        // now for better diagnostics.
981        int fd = open(fileName, O_CREAT | O_RDWR, 0644);
982        if (fd < 0) {
983            fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
984            return 1;
985        }
986        close(fd);
987    }
988
989    status_t err = recordScreen(fileName);
990    if (err == NO_ERROR) {
991        // Try to notify the media scanner.  Not fatal if this fails.
992        notifyMediaScanner(fileName);
993    }
994    ALOGD(err == NO_ERROR ? "success" : "failed");
995    return (int) err;
996}
997