// screenrecord.cpp revision 884989c67081190ff864419328e9e81506db67ca
1/*
2 * Copyright 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#include <assert.h>
18#include <ctype.h>
19#include <fcntl.h>
20#include <inttypes.h>
21#include <getopt.h>
22#include <signal.h>
23#include <stdio.h>
24#include <stdlib.h>
25#include <string.h>
26#include <sys/wait.h>
27#include <termios.h>
28#include <unistd.h>
29
30#define LOG_TAG "ScreenRecord"
31#define ATRACE_TAG ATRACE_TAG_GRAPHICS
32//#define LOG_NDEBUG 0
33#include <utils/Log.h>
34
35#include <binder/IPCThreadState.h>
36#include <utils/Errors.h>
37#include <utils/Timers.h>
38#include <utils/Trace.h>
39
40#include <gui/Surface.h>
41#include <gui/SurfaceComposerClient.h>
42#include <gui/ISurfaceComposer.h>
43#include <ui/DisplayInfo.h>
44#include <media/openmax/OMX_IVCommon.h>
45#include <media/stagefright/foundation/ABuffer.h>
46#include <media/stagefright/foundation/AMessage.h>
47#include <media/stagefright/MediaCodec.h>
48#include <media/stagefright/MediaErrors.h>
49#include <media/stagefright/MediaMuxer.h>
50#include <media/ICrypto.h>
51
52#include "screenrecord.h"
53#include "Overlay.h"
54#include "FrameOutput.h"
55
56using namespace android;
57
58static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
59static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
60static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
61static const uint32_t kFallbackWidth = 1280;        // 720p
62static const uint32_t kFallbackHeight = 720;
63static const char* kMimeTypeAvc = "video/avc";
64
65// Command-line parameters.
66static bool gVerbose = false;           // chatty on stdout
67static bool gRotate = false;            // rotate 90 degrees
68static enum {
69    FORMAT_MP4, FORMAT_H264, FORMAT_FRAMES, FORMAT_RAW_FRAMES
70} gOutputFormat = FORMAT_MP4;           // data format for output
71static bool gSizeSpecified = false;     // was size explicitly requested?
72static bool gWantInfoScreen = false;    // do we want initial info screen?
73static bool gWantFrameTime = false;     // do we want times on each frame?
74static uint32_t gVideoWidth = 0;        // default width+height
75static uint32_t gVideoHeight = 0;
76static uint32_t gBitRate = 4000000;     // 4Mbps
77static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
78
79// Set by signal handler to stop recording.
80static volatile bool gStopRequested;
81
82// Previous signal handler state, restored after first hit.
83static struct sigaction gOrigSigactionINT;
84static struct sigaction gOrigSigactionHUP;
85
86
87/*
88 * Catch keyboard interrupt signals.  On receipt, the "stop requested"
89 * flag is raised, and the original handler is restored (so that, if
90 * we get stuck finishing, a second Ctrl-C will kill the process).
91 */
92static void signalCatcher(int signum)
93{
94    gStopRequested = true;
95    switch (signum) {
96    case SIGINT:
97    case SIGHUP:
98        sigaction(SIGINT, &gOrigSigactionINT, NULL);
99        sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
100        break;
101    default:
102        abort();
103        break;
104    }
105}
106
107/*
108 * Configures signal handlers.  The previous handlers are saved.
109 *
110 * If the command is run from an interactive adb shell, we get SIGINT
111 * when Ctrl-C is hit.  If we're run from the host, the local adb process
112 * gets the signal, and we get a SIGHUP when the terminal disconnects.
113 */
114static status_t configureSignals() {
115    struct sigaction act;
116    memset(&act, 0, sizeof(act));
117    act.sa_handler = signalCatcher;
118    if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
119        status_t err = -errno;
120        fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
121                strerror(errno));
122        return err;
123    }
124    if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
125        status_t err = -errno;
126        fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
127                strerror(errno));
128        return err;
129    }
130    return NO_ERROR;
131}
132
133/*
134 * Returns "true" if the device is rotated 90 degrees.
135 */
136static bool isDeviceRotated(int orientation) {
137    return orientation != DISPLAY_ORIENTATION_0 &&
138            orientation != DISPLAY_ORIENTATION_180;
139}
140
141/*
142 * Configures and starts the MediaCodec encoder.  Obtains an input surface
143 * from the codec.
144 */
145static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
146        sp<IGraphicBufferProducer>* pBufferProducer) {
147    status_t err;
148
149    if (gVerbose) {
150        printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
151                gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
152    }
153
154    sp<AMessage> format = new AMessage;
155    format->setInt32("width", gVideoWidth);
156    format->setInt32("height", gVideoHeight);
157    format->setString("mime", kMimeTypeAvc);
158    format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
159    format->setInt32("bitrate", gBitRate);
160    format->setFloat("frame-rate", displayFps);
161    format->setInt32("i-frame-interval", 10);
162
163    sp<ALooper> looper = new ALooper;
164    looper->setName("screenrecord_looper");
165    looper->start();
166    ALOGV("Creating codec");
167    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
168    if (codec == NULL) {
169        fprintf(stderr, "ERROR: unable to create %s codec instance\n",
170                kMimeTypeAvc);
171        return UNKNOWN_ERROR;
172    }
173
174    err = codec->configure(format, NULL, NULL,
175            MediaCodec::CONFIGURE_FLAG_ENCODE);
176    if (err != NO_ERROR) {
177        fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
178                kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
179        codec->release();
180        return err;
181    }
182
183    ALOGV("Creating encoder input surface");
184    sp<IGraphicBufferProducer> bufferProducer;
185    err = codec->createInputSurface(&bufferProducer);
186    if (err != NO_ERROR) {
187        fprintf(stderr,
188            "ERROR: unable to create encoder input surface (err=%d)\n", err);
189        codec->release();
190        return err;
191    }
192
193    ALOGV("Starting codec");
194    err = codec->start();
195    if (err != NO_ERROR) {
196        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
197        codec->release();
198        return err;
199    }
200
201    ALOGV("Codec prepared");
202    *pCodec = codec;
203    *pBufferProducer = bufferProducer;
204    return 0;
205}
206
207/*
208 * Sets the display projection, based on the display dimensions, video size,
209 * and device orientation.
210 */
211static status_t setDisplayProjection(const sp<IBinder>& dpy,
212        const DisplayInfo& mainDpyInfo) {
213    status_t err;
214
215    // Set the region of the layer stack we're interested in, which in our
216    // case is "all of it".  If the app is rotated (so that the width of the
217    // app is based on the height of the display), reverse width/height.
218    bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
219    uint32_t sourceWidth, sourceHeight;
220    if (!deviceRotated) {
221        sourceWidth = mainDpyInfo.w;
222        sourceHeight = mainDpyInfo.h;
223    } else {
224        ALOGV("using rotated width/height");
225        sourceHeight = mainDpyInfo.w;
226        sourceWidth = mainDpyInfo.h;
227    }
228    Rect layerStackRect(sourceWidth, sourceHeight);
229
230    // We need to preserve the aspect ratio of the display.
231    float displayAspect = (float) sourceHeight / (float) sourceWidth;
232
233
234    // Set the way we map the output onto the display surface (which will
235    // be e.g. 1280x720 for a 720p video).  The rect is interpreted
236    // post-rotation, so if the display is rotated 90 degrees we need to
237    // "pre-rotate" it by flipping width/height, so that the orientation
238    // adjustment changes it back.
239    //
240    // We might want to encode a portrait display as landscape to use more
241    // of the screen real estate.  (If players respect a 90-degree rotation
242    // hint, we can essentially get a 720x1280 video instead of 1280x720.)
243    // In that case, we swap the configured video width/height and then
244    // supply a rotation value to the display projection.
245    uint32_t videoWidth, videoHeight;
246    uint32_t outWidth, outHeight;
247    if (!gRotate) {
248        videoWidth = gVideoWidth;
249        videoHeight = gVideoHeight;
250    } else {
251        videoWidth = gVideoHeight;
252        videoHeight = gVideoWidth;
253    }
254    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
255        // limited by narrow width; reduce height
256        outWidth = videoWidth;
257        outHeight = (uint32_t)(videoWidth * displayAspect);
258    } else {
259        // limited by short height; restrict width
260        outHeight = videoHeight;
261        outWidth = (uint32_t)(videoHeight / displayAspect);
262    }
263    uint32_t offX, offY;
264    offX = (videoWidth - outWidth) / 2;
265    offY = (videoHeight - outHeight) / 2;
266    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
267
268    if (gVerbose) {
269        if (gRotate) {
270            printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
271                    outHeight, outWidth, offY, offX);
272        } else {
273            printf("Content area is %ux%u at offset x=%d y=%d\n",
274                    outWidth, outHeight, offX, offY);
275        }
276    }
277
278    SurfaceComposerClient::setDisplayProjection(dpy,
279            gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
280            layerStackRect, displayRect);
281    return NO_ERROR;
282}
283
284/*
285 * Configures the virtual display.  When this completes, virtual display
286 * frames will start arriving from the buffer producer.
287 */
288static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
289        const sp<IGraphicBufferProducer>& bufferProducer,
290        sp<IBinder>* pDisplayHandle) {
291    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
292            String8("ScreenRecorder"), false /*secure*/);
293
294    SurfaceComposerClient::openGlobalTransaction();
295    SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
296    setDisplayProjection(dpy, mainDpyInfo);
297    SurfaceComposerClient::setDisplayLayerStack(dpy, 0);    // default stack
298    SurfaceComposerClient::closeGlobalTransaction();
299
300    *pDisplayHandle = dpy;
301
302    return NO_ERROR;
303}
304
305/*
306 * Runs the MediaCodec encoder, sending the output to the MediaMuxer.  The
307 * input frames are coming from the virtual display as fast as SurfaceFlinger
308 * wants to send them.
309 *
310 * Exactly one of muxer or rawFp must be non-null.
311 *
312 * The muxer must *not* have been started before calling.
313 */
static status_t runEncoder(const sp<MediaCodec>& encoder,
        const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
        const sp<IBinder>& virtualDpy, uint8_t orientation) {
    static int kTimeout = 250000;   // dequeue timeout in usec; short so we notice signals
    status_t err;
    ssize_t trackIdx = -1;          // muxer track index; set on INFO_FORMAT_CHANGED
    uint32_t debugNumFrames = 0;    // frames written, for verbose summary only
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
    DisplayInfo mainDpyInfo;

    // Caller must supply exactly one sink: a muxer (mp4) or a raw FILE*.
    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));

    Vector<sp<ABuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    // This is set by the signal handler.
    gStopRequested = false;

    // Run until we're signaled.
    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        // Enforce the recording time limit before blocking on the codec.
        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            if (gVerbose) {
                printf("Time limit reached\n");
            }
            break;
        }

        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
        switch (err) {
        case NO_ERROR:
            // got a buffer
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                ALOGV("Got codec config buffer (%zu bytes)", size);
                if (muxer != NULL) {
                    // ignore this -- we passed the CSD into MediaMuxer when
                    // we got the format change notification
                    size = 0;
                }
            }
            if (size != 0) {
                ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
                        bufIndex, size, ptsUsec);

                { // scope
                    ATRACE_NAME("orientation");
                    // Check orientation, update if it has changed.
                    //
                    // Polling for changes is inefficient and wrong, but the
                    // useful stuff is hard to get at without a Dalvik VM.
                    err = SurfaceComposerClient::getDisplayInfo(mainDpy,
                            &mainDpyInfo);
                    if (err != NO_ERROR) {
                        ALOGW("getDisplayInfo(main) failed: %d", err);
                    } else if (orientation != mainDpyInfo.orientation) {
                        ALOGD("orientation changed, now %d", mainDpyInfo.orientation);
                        SurfaceComposerClient::openGlobalTransaction();
                        setDisplayProjection(virtualDpy, mainDpyInfo);
                        SurfaceComposerClient::closeGlobalTransaction();
                        orientation = mainDpyInfo.orientation;
                    }
                }

                // If the virtual display isn't providing us with timestamps,
                // use the current time.  This isn't great -- we could get
                // decoded data in clusters -- but we're not expecting
                // to hit this anyway.
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }

                if (muxer == NULL) {
                    // Raw output: dump the encoded bytes straight to the file.
                    fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
                    // Flush the data immediately in case we're streaming.
                    // We don't want to do this if all we've written is
                    // the SPS/PPS data because mplayer gets confused.
                    if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
                        fflush(rawFp);
                    }
                } else {
                    // The MediaMuxer docs are unclear, but it appears that we
                    // need to pass either the full set of BufferInfo flags, or
                    // (flags & BUFFER_FLAG_SYNCFRAME).
                    //
                    // If this blocks for too long we could drop frames.  We may
                    // want to queue these up and do them on a different thread.
                    ATRACE_NAME("write sample");
                    assert(trackIdx != -1);
                    err = muxer->writeSampleData(buffers[bufIndex], trackIdx,
                            ptsUsec, flags);
                    if (err != NO_ERROR) {
                        fprintf(stderr,
                            "Failed writing data to muxer (err=%d)\n", err);
                        return err;
                    }
                }
                debugNumFrames++;
            }
            // Return the buffer to the codec whether or not we wrote it out.
            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                        err);
                return err;
            }
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                // Not expecting EOS from SurfaceFlinger.  Go with it.
                ALOGI("Received end-of-stream");
                gStopRequested = true;
            }
            break;
        case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
            // Timed out waiting for output; loop so we can re-check
            // gStopRequested and the time limit.
            ALOGV("Got -EAGAIN, looping");
            break;
        case INFO_FORMAT_CHANGED:           // INFO_OUTPUT_FORMAT_CHANGED
            {
                // Format includes CSD, which we must provide to muxer.
                ALOGV("Encoder format changed");
                sp<AMessage> newFormat;
                encoder->getOutputFormat(&newFormat);
                if (muxer != NULL) {
                    trackIdx = muxer->addTrack(newFormat);
                    ALOGV("Starting muxer");
                    err = muxer->start();
                    if (err != NO_ERROR) {
                        fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
                        return err;
                    }
                }
            }
            break;
        case INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED
            // Not expected for an encoder; handle it anyway.
            ALOGV("Encoder buffers changed");
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                fprintf(stderr,
                        "Unable to get new output buffers (err=%d)\n", err);
                return err;
            }
            break;
        case INVALID_OPERATION:
            ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
            return err;
        default:
            fprintf(stderr,
                    "Got weird result %d from dequeueOutputBuffer\n", err);
            return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", gStopRequested);
    if (gVerbose) {
        printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
                debugNumFrames, nanoseconds_to_seconds(
                        systemTime(CLOCK_MONOTONIC) - startWhenNsec));
    }
    return NO_ERROR;
}
483
484/*
485 * Raw H.264 byte stream output requested.  Send the output to stdout
486 * if desired.  If the output is a tty, reconfigure it to avoid the
487 * CRLF line termination that we see with "adb shell" commands.
488 */
489static FILE* prepareRawOutput(const char* fileName) {
490    FILE* rawFp = NULL;
491
492    if (strcmp(fileName, "-") == 0) {
493        if (gVerbose) {
494            fprintf(stderr, "ERROR: verbose output and '-' not compatible");
495            return NULL;
496        }
497        rawFp = stdout;
498    } else {
499        rawFp = fopen(fileName, "w");
500        if (rawFp == NULL) {
501            fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
502            return NULL;
503        }
504    }
505
506    int fd = fileno(rawFp);
507    if (isatty(fd)) {
508        // best effort -- reconfigure tty for "raw"
509        ALOGD("raw video output to tty (fd=%d)", fd);
510        struct termios term;
511        if (tcgetattr(fd, &term) == 0) {
512            cfmakeraw(&term);
513            if (tcsetattr(fd, TCSANOW, &term) == 0) {
514                ALOGD("tty successfully configured for raw");
515            }
516        }
517    }
518
519    return rawFp;
520}
521
522/*
523 * Main "do work" start point.
524 *
525 * Configures codec, muxer, and virtual display, then starts moving bits
526 * around.
527 */
static status_t recordScreen(const char* fileName) {
    status_t err;

    // Configure signal handler.
    err = configureSignals();
    if (err != NO_ERROR) return err;

    // Start Binder thread pool.  MediaCodec needs to be able to receive
    // messages from mediaserver.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    // Get main display parameters.
    sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
            ISurfaceComposer::eDisplayIdMain);
    DisplayInfo mainDpyInfo;
    err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display characteristics\n");
        return err;
    }
    if (gVerbose) {
        printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
                mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps,
                mainDpyInfo.orientation);
    }

    // Default the video size to the (un-rotated) display size when the
    // user didn't specify one on the command line.
    bool rotated = isDeviceRotated(mainDpyInfo.orientation);
    if (gVideoWidth == 0) {
        gVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
    }
    if (gVideoHeight == 0) {
        gVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
    }

    // Configure and start the encoder.
    sp<MediaCodec> encoder;
    sp<FrameOutput> frameOutput;
    sp<IGraphicBufferProducer> encoderInputSurface;
    if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
        err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);

        if (err != NO_ERROR && !gSizeSpecified) {
            // fallback is defined for landscape; swap if we're in portrait
            bool needSwap = gVideoWidth < gVideoHeight;
            uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
            uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
            if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
                ALOGV("Retrying with 720p");
                fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                        gVideoWidth, gVideoHeight, newWidth, newHeight);
                gVideoWidth = newWidth;
                gVideoHeight = newHeight;
                err = prepareEncoder(mainDpyInfo.fps, &encoder,
                        &encoderInputSurface);
            }
        }
        if (err != NO_ERROR) return err;

        // From here on, we must explicitly release() the encoder before it goes
        // out of scope, or we will get an assertion failure from stagefright
        // later on in a different thread.
    } else {
        // We're not using an encoder at all.  The "encoder input surface" we hand to
        // SurfaceFlinger will just feed directly to us.
        frameOutput = new FrameOutput();
        err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
        if (err != NO_ERROR) {
            return err;
        }
    }

    // Draw the "info" page by rendering a frame with GLES and sending
    // it directly to the encoder.
    // TODO: consider displaying this as a regular layer to avoid b/11697754
    if (gWantInfoScreen) {
        Overlay::drawInfoPage(encoderInputSurface);
    }

    // Configure optional overlay.
    sp<IGraphicBufferProducer> bufferProducer;
    sp<Overlay> overlay;
    if (gWantFrameTime) {
        // Send virtual display frames to an external texture.
        overlay = new Overlay();
        err = overlay->start(encoderInputSurface, &bufferProducer);
        if (err != NO_ERROR) {
            if (encoder != NULL) encoder->release();
            return err;
        }
        if (gVerbose) {
            printf("Bugreport overlay created\n");
        }
    } else {
        // Use the encoder's input surface as the virtual display surface.
        bufferProducer = encoderInputSurface;
    }

    // Configure virtual display.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
    if (err != NO_ERROR) {
        if (encoder != NULL) encoder->release();
        return err;
    }

    // Open the chosen output sink: a MediaMuxer for .mp4, or a raw FILE*
    // for H.264 / frame formats.
    sp<MediaMuxer> muxer = NULL;
    FILE* rawFp = NULL;
    switch (gOutputFormat) {
        case FORMAT_MP4: {
            // Configure muxer.  We have to wait for the CSD blob from the encoder
            // before we can start it.
            muxer = new MediaMuxer(fileName, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
            if (gRotate) {
                muxer->setOrientationHint(90);  // TODO: does this do anything?
            }
            break;
        }
        case FORMAT_H264:
        case FORMAT_FRAMES:
        case FORMAT_RAW_FRAMES: {
            rawFp = prepareRawOutput(fileName);
            if (rawFp == NULL) {
                if (encoder != NULL) encoder->release();
                return -1;
            }
            break;
        }
        default:
            fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
            abort();
    }

    if (gOutputFormat == FORMAT_FRAMES || gOutputFormat == FORMAT_RAW_FRAMES) {
        // TODO: if we want to make this a proper feature, we should output
        //       an outer header with version info.  Right now we never change
        //       the frame size or format, so we could conceivably just send
        //       the current frame header once and then follow it with an
        //       unbroken stream of data.

        // Make the EGL context current again.  This gets unhooked if we're
        // using "--bugreport" mode.
        // TODO: figure out if we can eliminate this
        frameOutput->prepareToCopy();

        while (!gStopRequested) {
            // Poll for frames, the same way we do for MediaCodec.  We do
            // all of the work on the main thread.
            //
            // Ideally we'd sleep indefinitely and wake when the
            // stop was requested, but this will do for now.  (It almost
            // works because wait() wakes when a signal hits, but we
            // need to handle the edge cases.)
            bool rawFrames = gOutputFormat == FORMAT_RAW_FRAMES;
            err = frameOutput->copyFrame(rawFp, 250000, rawFrames);
            if (err == ETIMEDOUT) {
                // No frame arrived within the poll window; not an error.
                err = NO_ERROR;
            } else if (err != NO_ERROR) {
                ALOGE("Got error %d from copyFrame()", err);
                break;
            }
        }
    } else {
        // Main encoder loop.
        err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy,
                mainDpyInfo.orientation);
        if (err != NO_ERROR) {
            fprintf(stderr, "Encoder failed (err=%d)\n", err);
            // fall through to cleanup
        }

        if (gVerbose) {
            printf("Stopping encoder and muxer\n");
        }
    }

    // Shut everything down, starting with the producer side.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    if (overlay != NULL) overlay->stop();
    if (encoder != NULL) encoder->stop();
    if (muxer != NULL) {
        // If we don't stop muxer explicitly, i.e. let the destructor run,
        // it may hang (b/11050628).
        muxer->stop();
    } else if (rawFp != stdout) {
        fclose(rawFp);
    }
    if (encoder != NULL) encoder->release();

    return err;
}
720
721/*
722 * Sends a broadcast to the media scanner to tell it about the new video.
723 *
724 * This is optional, but nice to have.
725 */
static status_t notifyMediaScanner(const char* fileName) {
    // need to do allocations before the fork()
    String8 fileUrl("file://");
    fileUrl.append(fileName);

    // argv for "am broadcast -a ...MEDIA_SCANNER_SCAN_FILE -d file://<name>".
    const char* kCommand = "/system/bin/am";
    const char* const argv[] = {
            kCommand,
            "broadcast",
            "-a",
            "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
            "-d",
            fileUrl.string(),
            NULL
    };
    if (gVerbose) {
        printf("Executing:");
        for (int i = 0; argv[i] != NULL; i++) {
            printf(" %s", argv[i]);
        }
        putchar('\n');
    }

    pid_t pid = fork();
    if (pid < 0) {
        int err = errno;
        ALOGW("fork() failed: %s", strerror(err));
        return -err;
    } else if (pid > 0) {
        // parent; wait for the child, mostly to make the verbose-mode output
        // look right, but also to check for and log failures
        int status;
        pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
        if (actualPid != pid) {
            ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
        } else if (status != 0) {
            ALOGW("'am broadcast' exited with status=%d", status);
        } else {
            ALOGV("'am broadcast' exited successfully");
        }
    } else {
        // child process
        if (!gVerbose) {
            // non-verbose, suppress 'am' output
            ALOGV("closing stdout/stderr in child");
            int fd = open("/dev/null", O_WRONLY);
            if (fd >= 0) {
                dup2(fd, STDOUT_FILENO);
                dup2(fd, STDERR_FILENO);
                close(fd);
            }
        }
        execv(kCommand, const_cast<char* const*>(argv));
        // execv only returns on failure.
        ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
        exit(1);
    }
    return NO_ERROR;
}
783
/*
 * Parses a string of the form "1280x720" into *pWidth and *pHeight.
 *
 * Rejects malformed input, negative dimensions, and values that do not
 * fit in a uint32_t (the original silently wrapped e.g. "-1x720" into a
 * huge unsigned width).
 *
 * Returns true on success; on failure *pWidth/*pHeight are untouched.
 */
static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
        uint32_t* pHeight) {
    long width, height;
    char* end;

    // Must specify base 10, or "0x0" gets parsed differently.
    width = strtol(widthHeight, &end, 10);
    if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
        // invalid chars in width, or missing 'x', or missing height
        return false;
    }
    height = strtol(end + 1, &end, 10);
    if (*end != '\0') {
        // invalid chars in height
        return false;
    }

    // Reject negative or out-of-range dimensions instead of letting them
    // wrap when stored into the unsigned outputs.
    if (width < 0 || height < 0 ||
            (unsigned long) width > UINT32_MAX ||
            (unsigned long) height > UINT32_MAX) {
        return false;
    }

    *pWidth = (uint32_t) width;
    *pHeight = (uint32_t) height;
    return true;
}
810
811/*
812 * Accepts a string with a bare number ("4000000") or with a single-character
813 * unit ("4m").
814 *
815 * Returns an error if parsing fails.
816 */
817static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
818    long value;
819    char* endptr;
820
821    value = strtol(str, &endptr, 10);
822    if (*endptr == '\0') {
823        // bare number
824        *pValue = value;
825        return NO_ERROR;
826    } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
827        *pValue = value * 1000000;  // check for overflow?
828        return NO_ERROR;
829    } else {
830        fprintf(stderr, "Unrecognized value: %s\n", str);
831        return UNKNOWN_ERROR;
832    }
833}
834
835/*
836 * Dumps usage on stderr.
837 */
838static void usage() {
839    fprintf(stderr,
840        "Usage: screenrecord [options] <filename>\n"
841        "\n"
842        "Android screenrecord v%d.%d.  Records the device's display to a .mp4 file.\n"
843        "\n"
844        "Options:\n"
845        "--size WIDTHxHEIGHT\n"
846        "    Set the video size, e.g. \"1280x720\".  Default is the device's main\n"
847        "    display resolution (if supported), 1280x720 if not.  For best results,\n"
848        "    use a size supported by the AVC encoder.\n"
849        "--bit-rate RATE\n"
850        "    Set the video bit rate, in bits per second.  Value may be specified as\n"
851        "    bits or megabits, e.g. '4000000' is equivalent to '4M'.  Default %dMbps.\n"
852        "--bugreport\n"
853        "    Add additional information, such as a timestamp overlay, that is helpful\n"
854        "    in videos captured to illustrate bugs.\n"
855        "--time-limit TIME\n"
856        "    Set the maximum recording time, in seconds.  Default / maximum is %d.\n"
857        "--verbose\n"
858        "    Display interesting information on stdout.\n"
859        "--help\n"
860        "    Show this message.\n"
861        "\n"
862        "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
863        "\n",
864        kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
865        );
866}
867
868/*
869 * Parses args and kicks things off.
870 */
871int main(int argc, char* const argv[]) {
872    static const struct option longOptions[] = {
873        { "help",               no_argument,        NULL, 'h' },
874        { "verbose",            no_argument,        NULL, 'v' },
875        { "size",               required_argument,  NULL, 's' },
876        { "bit-rate",           required_argument,  NULL, 'b' },
877        { "time-limit",         required_argument,  NULL, 't' },
878        { "bugreport",          no_argument,        NULL, 'u' },
879        // "unofficial" options
880        { "show-device-info",   no_argument,        NULL, 'i' },
881        { "show-frame-time",    no_argument,        NULL, 'f' },
882        { "rotate",             no_argument,        NULL, 'r' },
883        { "output-format",      required_argument,  NULL, 'o' },
884        { NULL,                 0,                  NULL, 0 }
885    };
886
887    while (true) {
888        int optionIndex = 0;
889        int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
890        if (ic == -1) {
891            break;
892        }
893
894        switch (ic) {
895        case 'h':
896            usage();
897            return 0;
898        case 'v':
899            gVerbose = true;
900            break;
901        case 's':
902            if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
903                fprintf(stderr, "Invalid size '%s', must be width x height\n",
904                        optarg);
905                return 2;
906            }
907            if (gVideoWidth == 0 || gVideoHeight == 0) {
908                fprintf(stderr,
909                    "Invalid size %ux%u, width and height may not be zero\n",
910                    gVideoWidth, gVideoHeight);
911                return 2;
912            }
913            gSizeSpecified = true;
914            break;
915        case 'b':
916            if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
917                return 2;
918            }
919            if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
920                fprintf(stderr,
921                        "Bit rate %dbps outside acceptable range [%d,%d]\n",
922                        gBitRate, kMinBitRate, kMaxBitRate);
923                return 2;
924            }
925            break;
926        case 't':
927            gTimeLimitSec = atoi(optarg);
928            if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
929                fprintf(stderr,
930                        "Time limit %ds outside acceptable range [1,%d]\n",
931                        gTimeLimitSec, kMaxTimeLimitSec);
932                return 2;
933            }
934            break;
935        case 'u':
936            gWantInfoScreen = true;
937            gWantFrameTime = true;
938            break;
939        case 'i':
940            gWantInfoScreen = true;
941            break;
942        case 'f':
943            gWantFrameTime = true;
944            break;
945        case 'r':
946            // experimental feature
947            gRotate = true;
948            break;
949        case 'o':
950            if (strcmp(optarg, "mp4") == 0) {
951                gOutputFormat = FORMAT_MP4;
952            } else if (strcmp(optarg, "h264") == 0) {
953                gOutputFormat = FORMAT_H264;
954            } else if (strcmp(optarg, "frames") == 0) {
955                gOutputFormat = FORMAT_FRAMES;
956            } else if (strcmp(optarg, "raw-frames") == 0) {
957                gOutputFormat = FORMAT_RAW_FRAMES;
958            } else {
959                fprintf(stderr, "Unknown format '%s'\n", optarg);
960                return 2;
961            }
962            break;
963        default:
964            if (ic != '?') {
965                fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
966            }
967            return 2;
968        }
969    }
970
971    if (optind != argc - 1) {
972        fprintf(stderr, "Must specify output file (see --help).\n");
973        return 2;
974    }
975
976    const char* fileName = argv[optind];
977    if (gOutputFormat == FORMAT_MP4) {
978        // MediaMuxer tries to create the file in the constructor, but we don't
979        // learn about the failure until muxer.start(), which returns a generic
980        // error code without logging anything.  We attempt to create the file
981        // now for better diagnostics.
982        int fd = open(fileName, O_CREAT | O_RDWR, 0644);
983        if (fd < 0) {
984            fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
985            return 1;
986        }
987        close(fd);
988    }
989
990    status_t err = recordScreen(fileName);
991    if (err == NO_ERROR) {
992        // Try to notify the media scanner.  Not fatal if this fails.
993        notifyMediaScanner(fileName);
994    }
995    ALOGD(err == NO_ERROR ? "success" : "failed");
996    return (int) err;
997}
998