screenrecord.cpp revision 587c6fefcd3c1d05c608ff511cf3534bc765256e
1/*
2 * Copyright 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 *      http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17#define LOG_TAG "ScreenRecord"
18#define ATRACE_TAG ATRACE_TAG_GRAPHICS
19//#define LOG_NDEBUG 0
20#include <utils/Log.h>
21
22#include <binder/IPCThreadState.h>
23#include <utils/Errors.h>
24#include <utils/Timers.h>
25#include <utils/Trace.h>
26
27#include <gui/Surface.h>
28#include <gui/SurfaceComposerClient.h>
29#include <gui/ISurfaceComposer.h>
30#include <ui/DisplayInfo.h>
31#include <media/openmax/OMX_IVCommon.h>
32#include <media/stagefright/foundation/ABuffer.h>
33#include <media/stagefright/foundation/AMessage.h>
34#include <media/stagefright/MediaCodec.h>
35#include <media/stagefright/MediaErrors.h>
36#include <media/stagefright/MediaMuxer.h>
37#include <media/ICrypto.h>
38
#include <assert.h>
#include <ctype.h>
#include <errno.h>
#include <fcntl.h>
#include <getopt.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/wait.h>
#include <termios.h>
#include <unistd.h>
50
51#include "screenrecord.h"
52#include "Overlay.h"
53#include "FrameOutput.h"
54
55using namespace android;
56
57static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
58static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
59static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
60static const uint32_t kFallbackWidth = 1280;        // 720p
61static const uint32_t kFallbackHeight = 720;
62static const char* kMimeTypeAvc = "video/avc";
63
64// Command-line parameters.
65static bool gVerbose = false;           // chatty on stdout
66static bool gRotate = false;            // rotate 90 degrees
67static enum {
68    FORMAT_MP4, FORMAT_H264, FORMAT_FRAMES
69} gOutputFormat = FORMAT_MP4;           // data format for output
70static bool gSizeSpecified = false;     // was size explicitly requested?
71static bool gWantInfoScreen = false;    // do we want initial info screen?
72static bool gWantFrameTime = false;     // do we want times on each frame?
73static uint32_t gVideoWidth = 0;        // default width+height
74static uint32_t gVideoHeight = 0;
75static uint32_t gBitRate = 4000000;     // 4Mbps
76static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
77
78// Set by signal handler to stop recording.
79static volatile bool gStopRequested;
80
81// Previous signal handler state, restored after first hit.
82static struct sigaction gOrigSigactionINT;
83static struct sigaction gOrigSigactionHUP;
84
85
86/*
87 * Catch keyboard interrupt signals.  On receipt, the "stop requested"
88 * flag is raised, and the original handler is restored (so that, if
89 * we get stuck finishing, a second Ctrl-C will kill the process).
90 */
91static void signalCatcher(int signum)
92{
93    gStopRequested = true;
94    switch (signum) {
95    case SIGINT:
96    case SIGHUP:
97        sigaction(SIGINT, &gOrigSigactionINT, NULL);
98        sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
99        break;
100    default:
101        abort();
102        break;
103    }
104}
105
106/*
107 * Configures signal handlers.  The previous handlers are saved.
108 *
109 * If the command is run from an interactive adb shell, we get SIGINT
110 * when Ctrl-C is hit.  If we're run from the host, the local adb process
111 * gets the signal, and we get a SIGHUP when the terminal disconnects.
112 */
113static status_t configureSignals() {
114    struct sigaction act;
115    memset(&act, 0, sizeof(act));
116    act.sa_handler = signalCatcher;
117    if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
118        status_t err = -errno;
119        fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
120                strerror(errno));
121        return err;
122    }
123    if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
124        status_t err = -errno;
125        fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
126                strerror(errno));
127        return err;
128    }
129    return NO_ERROR;
130}
131
132/*
133 * Returns "true" if the device is rotated 90 degrees.
134 */
135static bool isDeviceRotated(int orientation) {
136    return orientation != DISPLAY_ORIENTATION_0 &&
137            orientation != DISPLAY_ORIENTATION_180;
138}
139
140/*
141 * Configures and starts the MediaCodec encoder.  Obtains an input surface
142 * from the codec.
143 */
144static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
145        sp<IGraphicBufferProducer>* pBufferProducer) {
146    status_t err;
147
148    if (gVerbose) {
149        printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
150                gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
151    }
152
153    sp<AMessage> format = new AMessage;
154    format->setInt32("width", gVideoWidth);
155    format->setInt32("height", gVideoHeight);
156    format->setString("mime", kMimeTypeAvc);
157    format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
158    format->setInt32("bitrate", gBitRate);
159    format->setFloat("frame-rate", displayFps);
160    format->setInt32("i-frame-interval", 10);
161
162    sp<ALooper> looper = new ALooper;
163    looper->setName("screenrecord_looper");
164    looper->start();
165    ALOGV("Creating codec");
166    sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
167    if (codec == NULL) {
168        fprintf(stderr, "ERROR: unable to create %s codec instance\n",
169                kMimeTypeAvc);
170        return UNKNOWN_ERROR;
171    }
172
173    err = codec->configure(format, NULL, NULL,
174            MediaCodec::CONFIGURE_FLAG_ENCODE);
175    if (err != NO_ERROR) {
176        fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
177                kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
178        codec->release();
179        return err;
180    }
181
182    ALOGV("Creating encoder input surface");
183    sp<IGraphicBufferProducer> bufferProducer;
184    err = codec->createInputSurface(&bufferProducer);
185    if (err != NO_ERROR) {
186        fprintf(stderr,
187            "ERROR: unable to create encoder input surface (err=%d)\n", err);
188        codec->release();
189        return err;
190    }
191
192    ALOGV("Starting codec");
193    err = codec->start();
194    if (err != NO_ERROR) {
195        fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
196        codec->release();
197        return err;
198    }
199
200    ALOGV("Codec prepared");
201    *pCodec = codec;
202    *pBufferProducer = bufferProducer;
203    return 0;
204}
205
206/*
207 * Sets the display projection, based on the display dimensions, video size,
208 * and device orientation.
209 */
210static status_t setDisplayProjection(const sp<IBinder>& dpy,
211        const DisplayInfo& mainDpyInfo) {
212    status_t err;
213
214    // Set the region of the layer stack we're interested in, which in our
215    // case is "all of it".  If the app is rotated (so that the width of the
216    // app is based on the height of the display), reverse width/height.
217    bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
218    uint32_t sourceWidth, sourceHeight;
219    if (!deviceRotated) {
220        sourceWidth = mainDpyInfo.w;
221        sourceHeight = mainDpyInfo.h;
222    } else {
223        ALOGV("using rotated width/height");
224        sourceHeight = mainDpyInfo.w;
225        sourceWidth = mainDpyInfo.h;
226    }
227    Rect layerStackRect(sourceWidth, sourceHeight);
228
229    // We need to preserve the aspect ratio of the display.
230    float displayAspect = (float) sourceHeight / (float) sourceWidth;
231
232
233    // Set the way we map the output onto the display surface (which will
234    // be e.g. 1280x720 for a 720p video).  The rect is interpreted
235    // post-rotation, so if the display is rotated 90 degrees we need to
236    // "pre-rotate" it by flipping width/height, so that the orientation
237    // adjustment changes it back.
238    //
239    // We might want to encode a portrait display as landscape to use more
240    // of the screen real estate.  (If players respect a 90-degree rotation
241    // hint, we can essentially get a 720x1280 video instead of 1280x720.)
242    // In that case, we swap the configured video width/height and then
243    // supply a rotation value to the display projection.
244    uint32_t videoWidth, videoHeight;
245    uint32_t outWidth, outHeight;
246    if (!gRotate) {
247        videoWidth = gVideoWidth;
248        videoHeight = gVideoHeight;
249    } else {
250        videoWidth = gVideoHeight;
251        videoHeight = gVideoWidth;
252    }
253    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
254        // limited by narrow width; reduce height
255        outWidth = videoWidth;
256        outHeight = (uint32_t)(videoWidth * displayAspect);
257    } else {
258        // limited by short height; restrict width
259        outHeight = videoHeight;
260        outWidth = (uint32_t)(videoHeight / displayAspect);
261    }
262    uint32_t offX, offY;
263    offX = (videoWidth - outWidth) / 2;
264    offY = (videoHeight - outHeight) / 2;
265    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
266
267    if (gVerbose) {
268        if (gRotate) {
269            printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
270                    outHeight, outWidth, offY, offX);
271        } else {
272            printf("Content area is %ux%u at offset x=%d y=%d\n",
273                    outWidth, outHeight, offX, offY);
274        }
275    }
276
277    SurfaceComposerClient::setDisplayProjection(dpy,
278            gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
279            layerStackRect, displayRect);
280    return NO_ERROR;
281}
282
283/*
284 * Configures the virtual display.  When this completes, virtual display
285 * frames will start arriving from the buffer producer.
286 */
287static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
288        const sp<IGraphicBufferProducer>& bufferProducer,
289        sp<IBinder>* pDisplayHandle) {
290    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
291            String8("ScreenRecorder"), false /*secure*/);
292
293    SurfaceComposerClient::openGlobalTransaction();
294    SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
295    setDisplayProjection(dpy, mainDpyInfo);
296    SurfaceComposerClient::setDisplayLayerStack(dpy, 0);    // default stack
297    SurfaceComposerClient::closeGlobalTransaction();
298
299    *pDisplayHandle = dpy;
300
301    return NO_ERROR;
302}
303
304/*
305 * Runs the MediaCodec encoder, sending the output to the MediaMuxer.  The
306 * input frames are coming from the virtual display as fast as SurfaceFlinger
307 * wants to send them.
308 *
309 * Exactly one of muxer or rawFp must be non-null.
310 *
311 * The muxer must *not* have been started before calling.
312 */
static status_t runEncoder(const sp<MediaCodec>& encoder,
        const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
        const sp<IBinder>& virtualDpy, uint8_t orientation) {
    static int kTimeout = 250000;   // be responsive on signal (dequeue timeout, usec)
    status_t err;
    ssize_t trackIdx = -1;          // muxer track index; set on INFO_FORMAT_CHANGED
    uint32_t debugNumFrames = 0;    // frames written, for the verbose summary
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
    DisplayInfo mainDpyInfo;

    // Exactly one output sink must be provided (see function contract above).
    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));

    Vector<sp<ABuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    // This is set by the signal handler.
    gStopRequested = false;

    // Run until we're signaled.
    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        // Enforce the --time-limit; checked once per dequeue attempt, so the
        // effective granularity is kTimeout.
        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            if (gVerbose) {
                printf("Time limit reached\n");
            }
            break;
        }

        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
        switch (err) {
        case NO_ERROR:
            // got a buffer
            if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                ALOGV("Got codec config buffer (%u bytes)", size);
                if (muxer != NULL) {
                    // ignore this -- we passed the CSD into MediaMuxer when
                    // we got the format change notification
                    size = 0;
                }
            }
            if (size != 0) {
                ALOGV("Got data in buffer %d, size=%d, pts=%lld",
                        bufIndex, size, ptsUsec);

                { // scope
                    ATRACE_NAME("orientation");
                    // Check orientation, update if it has changed.
                    //
                    // Polling for changes is inefficient and wrong, but the
                    // useful stuff is hard to get at without a Dalvik VM.
                    err = SurfaceComposerClient::getDisplayInfo(mainDpy,
                            &mainDpyInfo);
                    if (err != NO_ERROR) {
                        // Non-fatal: keep recording with the old projection.
                        ALOGW("getDisplayInfo(main) failed: %d", err);
                    } else if (orientation != mainDpyInfo.orientation) {
                        ALOGD("orientation changed, now %d", mainDpyInfo.orientation);
                        SurfaceComposerClient::openGlobalTransaction();
                        setDisplayProjection(virtualDpy, mainDpyInfo);
                        SurfaceComposerClient::closeGlobalTransaction();
                        orientation = mainDpyInfo.orientation;
                    }
                }

                // If the virtual display isn't providing us with timestamps,
                // use the current time.  This isn't great -- we could get
                // decoded data in clusters -- but we're not expecting
                // to hit this anyway.
                if (ptsUsec == 0) {
                    ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                }

                if (muxer == NULL) {
                    // Raw H.264 output: write the access unit straight out.
                    fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
                    // Flush the data immediately in case we're streaming.
                    // We don't want to do this if all we've written is
                    // the SPS/PPS data because mplayer gets confused.
                    if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
                        fflush(rawFp);
                    }
                } else {
                    // The MediaMuxer docs are unclear, but it appears that we
                    // need to pass either the full set of BufferInfo flags, or
                    // (flags & BUFFER_FLAG_SYNCFRAME).
                    //
                    // If this blocks for too long we could drop frames.  We may
                    // want to queue these up and do them on a different thread.
                    ATRACE_NAME("write sample");
                    // trackIdx is established by the INFO_FORMAT_CHANGED case
                    // below, which arrives before the first data buffer.
                    assert(trackIdx != -1);
                    err = muxer->writeSampleData(buffers[bufIndex], trackIdx,
                            ptsUsec, flags);
                    if (err != NO_ERROR) {
                        fprintf(stderr,
                            "Failed writing data to muxer (err=%d)\n", err);
                        return err;
                    }
                }
                debugNumFrames++;
            }
            err = encoder->releaseOutputBuffer(bufIndex);
            if (err != NO_ERROR) {
                fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                        err);
                return err;
            }
            if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                // Not expecting EOS from SurfaceFlinger.  Go with it.
                ALOGI("Received end-of-stream");
                gStopRequested = true;
            }
            break;
        case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
            // Nothing ready within kTimeout; loop so we can re-check the
            // stop flag and the time limit.
            ALOGV("Got -EAGAIN, looping");
            break;
        case INFO_FORMAT_CHANGED:           // INFO_OUTPUT_FORMAT_CHANGED
            {
                // Format includes CSD, which we must provide to muxer.
                ALOGV("Encoder format changed");
                sp<AMessage> newFormat;
                encoder->getOutputFormat(&newFormat);
                if (muxer != NULL) {
                    trackIdx = muxer->addTrack(newFormat);
                    ALOGV("Starting muxer");
                    err = muxer->start();
                    if (err != NO_ERROR) {
                        fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
                        return err;
                    }
                }
            }
            break;
        case INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED
            // Not expected for an encoder; handle it anyway.
            ALOGV("Encoder buffers changed");
            err = encoder->getOutputBuffers(&buffers);
            if (err != NO_ERROR) {
                fprintf(stderr,
                        "Unable to get new output buffers (err=%d)\n", err);
                return err;
            }
            break;
        case INVALID_OPERATION:
            ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
            return err;
        default:
            fprintf(stderr,
                    "Got weird result %d from dequeueOutputBuffer\n", err);
            return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", gStopRequested);
    if (gVerbose) {
        printf("Encoder stopping; recorded %u frames in %lld seconds\n",
                debugNumFrames, nanoseconds_to_seconds(
                        systemTime(CLOCK_MONOTONIC) - startWhenNsec));
    }
    return NO_ERROR;
}
482
483/*
484 * Raw H.264 byte stream output requested.  Send the output to stdout
485 * if desired.  If the output is a tty, reconfigure it to avoid the
486 * CRLF line termination that we see with "adb shell" commands.
487 */
488static FILE* prepareRawOutput(const char* fileName) {
489    FILE* rawFp = NULL;
490
491    if (strcmp(fileName, "-") == 0) {
492        if (gVerbose) {
493            fprintf(stderr, "ERROR: verbose output and '-' not compatible");
494            return NULL;
495        }
496        rawFp = stdout;
497    } else {
498        rawFp = fopen(fileName, "w");
499        if (rawFp == NULL) {
500            fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
501            return NULL;
502        }
503    }
504
505    int fd = fileno(rawFp);
506    if (isatty(fd)) {
507        // best effort -- reconfigure tty for "raw"
508        ALOGD("raw video output to tty (fd=%d)", fd);
509        struct termios term;
510        if (tcgetattr(fd, &term) == 0) {
511            cfmakeraw(&term);
512            if (tcsetattr(fd, TCSANOW, &term) == 0) {
513                ALOGD("tty successfully configured for raw");
514            }
515        }
516    }
517
518    return rawFp;
519}
520
521/*
522 * Main "do work" start point.
523 *
524 * Configures codec, muxer, and virtual display, then starts moving bits
525 * around.
526 */
static status_t recordScreen(const char* fileName) {
    status_t err;

    // Configure signal handler.
    err = configureSignals();
    if (err != NO_ERROR) return err;

    // Start Binder thread pool.  MediaCodec needs to be able to receive
    // messages from mediaserver.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    // Get main display parameters.
    sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
            ISurfaceComposer::eDisplayIdMain);
    DisplayInfo mainDpyInfo;
    err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display characteristics\n");
        return err;
    }
    if (gVerbose) {
        printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
                mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps,
                mainDpyInfo.orientation);
    }

    // Default the video size to the display size, accounting for rotation,
    // unless --size was given (gVideoWidth/Height already nonzero).
    bool rotated = isDeviceRotated(mainDpyInfo.orientation);
    if (gVideoWidth == 0) {
        gVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
    }
    if (gVideoHeight == 0) {
        gVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
    }

    // Configure and start the encoder.
    sp<MediaCodec> encoder;
    sp<FrameOutput> frameOutput;
    sp<IGraphicBufferProducer> encoderInputSurface;
    if (gOutputFormat != FORMAT_FRAMES) {
        err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);

        if (err != NO_ERROR && !gSizeSpecified) {
            // fallback is defined for landscape; swap if we're in portrait
            bool needSwap = gVideoWidth < gVideoHeight;
            uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
            uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
            if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
                ALOGV("Retrying with 720p");
                fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                        gVideoWidth, gVideoHeight, newWidth, newHeight);
                gVideoWidth = newWidth;
                gVideoHeight = newHeight;
                err = prepareEncoder(mainDpyInfo.fps, &encoder,
                        &encoderInputSurface);
            }
        }
        if (err != NO_ERROR) return err;

        // From here on, we must explicitly release() the encoder before it goes
        // out of scope, or we will get an assertion failure from stagefright
        // later on in a different thread.
    } else {
        // We're not using an encoder at all.  The "encoder input surface" we hand to
        // SurfaceFlinger will just feed directly to us.
        frameOutput = new FrameOutput();
        err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
        if (err != NO_ERROR) {
            return err;
        }
    }

    // Draw the "info" page by rendering a frame with GLES and sending
    // it directly to the encoder.
    // TODO: consider displaying this as a regular layer to avoid b/11697754
    if (gWantInfoScreen) {
        Overlay::drawInfoPage(encoderInputSurface);
    }

    // Configure optional overlay.
    sp<IGraphicBufferProducer> bufferProducer;
    sp<Overlay> overlay;
    if (gWantFrameTime) {
        // Send virtual display frames to an external texture.
        overlay = new Overlay();
        err = overlay->start(encoderInputSurface, &bufferProducer);
        if (err != NO_ERROR) {
            if (encoder != NULL) encoder->release();
            return err;
        }
        if (gVerbose) {
            printf("Bugreport overlay created\n");
        }
    } else {
        // Use the encoder's input surface as the virtual display surface.
        bufferProducer = encoderInputSurface;
    }

    // Configure virtual display.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
    if (err != NO_ERROR) {
        if (encoder != NULL) encoder->release();
        return err;
    }

    // Pick the output sink: MP4 muxer, or a raw FILE* for H.264/frames.
    // Exactly one of muxer/rawFp ends up non-NULL (runEncoder asserts this).
    sp<MediaMuxer> muxer = NULL;
    FILE* rawFp = NULL;
    switch (gOutputFormat) {
        case FORMAT_MP4: {
            // Configure muxer.  We have to wait for the CSD blob from the encoder
            // before we can start it.
            muxer = new MediaMuxer(fileName, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
            if (gRotate) {
                muxer->setOrientationHint(90);  // TODO: does this do anything?
            }
            break;
        }
        case FORMAT_H264:
        case FORMAT_FRAMES: {
            rawFp = prepareRawOutput(fileName);
            if (rawFp == NULL) {
                if (encoder != NULL) encoder->release();
                return -1;
            }
            break;
        }
        default:
            fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
            abort();
    }

    if (gOutputFormat == FORMAT_FRAMES) {
        // TODO: if we want to make this a proper feature, we should output
        //       an outer header with version info.  Right now we never change
        //       the frame size or format, so we could conceivably just send
        //       the current frame header once and then follow it with an
        //       unbroken stream of data.

        // Make the EGL context current again.  This gets unhooked if we're
        // using "--bugreport" mode.
        // TODO: figure out if we can eliminate this
        frameOutput->prepareToCopy();

        while (!gStopRequested) {
            // Poll for frames, the same way we do for MediaCodec.  We do
            // all of the work on the main thread.
            //
            // Ideally we'd sleep indefinitely and wake when the
            // stop was requested, but this will do for now.  (It almost
            // works because wait() wakes when a signal hits, but we
            // need to handle the edge cases.)
            err = frameOutput->copyFrame(rawFp, 250000);
            if (err == ETIMEDOUT) {
                // Timeout just means no frame arrived; keep polling.
                err = NO_ERROR;
            } else if (err != NO_ERROR) {
                ALOGE("Got error %d from copyFrame()", err);
                break;
            }
        }
    } else {
        // Main encoder loop.
        err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy,
                mainDpyInfo.orientation);
        if (err != NO_ERROR) {
            fprintf(stderr, "Encoder failed (err=%d)\n", err);
            // fall through to cleanup
        }

        if (gVerbose) {
            printf("Stopping encoder and muxer\n");
        }
    }

    // Shut everything down, starting with the producer side.
    // Order matters: drop the producer and virtual display first so no new
    // frames arrive while the encoder/muxer drain and stop.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    if (overlay != NULL) overlay->stop();
    if (encoder != NULL) encoder->stop();
    if (muxer != NULL) {
        // If we don't stop muxer explicitly, i.e. let the destructor run,
        // it may hang (b/11050628).
        muxer->stop();
    } else if (rawFp != stdout) {
        fclose(rawFp);
    }
    if (encoder != NULL) encoder->release();

    return err;
}
717
718/*
719 * Sends a broadcast to the media scanner to tell it about the new video.
720 *
721 * This is optional, but nice to have.
722 */
static status_t notifyMediaScanner(const char* fileName) {
    // need to do allocations before the fork()
    String8 fileUrl("file://");
    fileUrl.append(fileName);

    // Build the "am broadcast" command line that asks the media scanner to
    // index the new recording.
    const char* kCommand = "/system/bin/am";
    const char* const argv[] = {
            kCommand,
            "broadcast",
            "-a",
            "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
            "-d",
            fileUrl.string(),
            NULL
    };
    if (gVerbose) {
        printf("Executing:");
        for (int i = 0; argv[i] != NULL; i++) {
            printf(" %s", argv[i]);
        }
        putchar('\n');
    }

    pid_t pid = fork();
    if (pid < 0) {
        int err = errno;
        ALOGW("fork() failed: %s", strerror(err));
        return -err;
    } else if (pid > 0) {
        // parent; wait for the child, mostly to make the verbose-mode output
        // look right, but also to check for and log failures
        int status;
        pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
        if (actualPid != pid) {
            ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
        } else if (status != 0) {
            // NOTE(review): this logs the raw waitpid status word rather than
            // WEXITSTATUS(status); fine for a log line, but the value is not
            // the child's exit code -- confirm before relying on it.
            ALOGW("'am broadcast' exited with status=%d", status);
        } else {
            ALOGV("'am broadcast' exited successfully");
        }
    } else {
        // child process
        if (!gVerbose) {
            // non-verbose, suppress 'am' output
            ALOGV("closing stdout/stderr in child");
            int fd = open("/dev/null", O_WRONLY);
            if (fd >= 0) {
                dup2(fd, STDOUT_FILENO);
                dup2(fd, STDERR_FILENO);
                close(fd);
            }
        }
        // execv only returns on failure.
        execv(kCommand, const_cast<char* const*>(argv));
        ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
        exit(1);
    }
    return NO_ERROR;
}
780
781/*
782 * Parses a string of the form "1280x720".
783 *
784 * Returns true on success.
785 */
/*
 * Parses a string of the form "1280x720".
 *
 * Returns true on success, false if the string is malformed or either
 * dimension is non-positive or too large for uint32_t.
 */
static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
        uint32_t* pHeight) {
    long width, height;
    char* end;

    // Must specify base 10, or "0x0" gets parsed differently.
    width = strtol(widthHeight, &end, 10);
    if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
        // invalid chars in width, or missing 'x', or missing height
        return false;
    }
    height = strtol(end + 1, &end, 10);
    if (*end != '\0') {
        // invalid chars in height
        return false;
    }

    // BUG FIX: reject non-positive or out-of-range dimensions; previously
    // "-1x720" would wrap to a huge unsigned value and "0x..." passed as 0.
    if (width <= 0 || height <= 0 ||
            (unsigned long) width > UINT32_MAX ||
            (unsigned long) height > UINT32_MAX) {
        return false;
    }

    *pWidth = width;
    *pHeight = height;
    return true;
}
807
808/*
809 * Accepts a string with a bare number ("4000000") or with a single-character
810 * unit ("4m").
811 *
812 * Returns an error if parsing fails.
813 */
814static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
815    long value;
816    char* endptr;
817
818    value = strtol(str, &endptr, 10);
819    if (*endptr == '\0') {
820        // bare number
821        *pValue = value;
822        return NO_ERROR;
823    } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
824        *pValue = value * 1000000;  // check for overflow?
825        return NO_ERROR;
826    } else {
827        fprintf(stderr, "Unrecognized value: %s\n", str);
828        return UNKNOWN_ERROR;
829    }
830}
831
832/*
833 * Dumps usage on stderr.
834 */
835static void usage() {
836    fprintf(stderr,
837        "Usage: screenrecord [options] <filename>\n"
838        "\n"
839        "Android screenrecord v%d.%d.  Records the device's display to a .mp4 file.\n"
840        "\n"
841        "Options:\n"
842        "--size WIDTHxHEIGHT\n"
843        "    Set the video size, e.g. \"1280x720\".  Default is the device's main\n"
844        "    display resolution (if supported), 1280x720 if not.  For best results,\n"
845        "    use a size supported by the AVC encoder.\n"
846        "--bit-rate RATE\n"
847        "    Set the video bit rate, in bits per second.  Value may be specified as\n"
848        "    bits or megabits, e.g. '4000000' is equivalent to '4M'.  Default %dMbps.\n"
849        "--bugreport\n"
850        "    Add additional information, such as a timestamp overlay, that is helpful\n"
851        "    in videos captured to illustrate bugs.\n"
852        "--time-limit TIME\n"
853        "    Set the maximum recording time, in seconds.  Default / maximum is %d.\n"
854        "--verbose\n"
855        "    Display interesting information on stdout.\n"
856        "--help\n"
857        "    Show this message.\n"
858        "\n"
859        "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
860        "\n",
861        kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
862        );
863}
864
865/*
866 * Parses args and kicks things off.
867 */
868int main(int argc, char* const argv[]) {
869    static const struct option longOptions[] = {
870        { "help",               no_argument,        NULL, 'h' },
871        { "verbose",            no_argument,        NULL, 'v' },
872        { "size",               required_argument,  NULL, 's' },
873        { "bit-rate",           required_argument,  NULL, 'b' },
874        { "time-limit",         required_argument,  NULL, 't' },
875        { "bugreport",          no_argument,        NULL, 'u' },
876        // "unofficial" options
877        { "show-device-info",   no_argument,        NULL, 'i' },
878        { "show-frame-time",    no_argument,        NULL, 'f' },
879        { "rotate",             no_argument,        NULL, 'r' },
880        { "output-format",      required_argument,  NULL, 'o' },
881        { NULL,                 0,                  NULL, 0 }
882    };
883
884    while (true) {
885        int optionIndex = 0;
886        int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
887        if (ic == -1) {
888            break;
889        }
890
891        switch (ic) {
892        case 'h':
893            usage();
894            return 0;
895        case 'v':
896            gVerbose = true;
897            break;
898        case 's':
899            if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
900                fprintf(stderr, "Invalid size '%s', must be width x height\n",
901                        optarg);
902                return 2;
903            }
904            if (gVideoWidth == 0 || gVideoHeight == 0) {
905                fprintf(stderr,
906                    "Invalid size %ux%u, width and height may not be zero\n",
907                    gVideoWidth, gVideoHeight);
908                return 2;
909            }
910            gSizeSpecified = true;
911            break;
912        case 'b':
913            if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
914                return 2;
915            }
916            if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
917                fprintf(stderr,
918                        "Bit rate %dbps outside acceptable range [%d,%d]\n",
919                        gBitRate, kMinBitRate, kMaxBitRate);
920                return 2;
921            }
922            break;
923        case 't':
924            gTimeLimitSec = atoi(optarg);
925            if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
926                fprintf(stderr,
927                        "Time limit %ds outside acceptable range [1,%d]\n",
928                        gTimeLimitSec, kMaxTimeLimitSec);
929                return 2;
930            }
931            break;
932        case 'u':
933            gWantInfoScreen = true;
934            gWantFrameTime = true;
935            break;
936        case 'i':
937            gWantInfoScreen = true;
938            break;
939        case 'f':
940            gWantFrameTime = true;
941            break;
942        case 'r':
943            // experimental feature
944            gRotate = true;
945            break;
946        case 'o':
947            if (strcmp(optarg, "mp4") == 0) {
948                gOutputFormat = FORMAT_MP4;
949            } else if (strcmp(optarg, "h264") == 0) {
950                gOutputFormat = FORMAT_H264;
951            } else if (strcmp(optarg, "frames") == 0) {
952                gOutputFormat = FORMAT_FRAMES;
953            } else {
954                fprintf(stderr, "Unknown format '%s'\n", optarg);
955                return 2;
956            }
957            break;
958        default:
959            if (ic != '?') {
960                fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
961            }
962            return 2;
963        }
964    }
965
966    if (optind != argc - 1) {
967        fprintf(stderr, "Must specify output file (see --help).\n");
968        return 2;
969    }
970
971    const char* fileName = argv[optind];
972    if (gOutputFormat == FORMAT_MP4) {
973        // MediaMuxer tries to create the file in the constructor, but we don't
974        // learn about the failure until muxer.start(), which returns a generic
975        // error code without logging anything.  We attempt to create the file
976        // now for better diagnostics.
977        int fd = open(fileName, O_CREAT | O_RDWR, 0644);
978        if (fd < 0) {
979            fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
980            return 1;
981        }
982        close(fd);
983    }
984
985    status_t err = recordScreen(fileName);
986    if (err == NO_ERROR) {
987        // Try to notify the media scanner.  Not fatal if this fails.
988        notifyMediaScanner(fileName);
989    }
990    ALOGD(err == NO_ERROR ? "success" : "failed");
991    return (int) err;
992}
993