/*
 * Copyright 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <assert.h>
#include <ctype.h>
#include <errno.h>
#include <fcntl.h>
#include <inttypes.h>
#include <getopt.h>
#include <signal.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <sys/wait.h>

#include <termios.h>
#include <unistd.h>

#define LOG_TAG "ScreenRecord"
#define ATRACE_TAG ATRACE_TAG_GRAPHICS
//#define LOG_NDEBUG 0
#include <utils/Log.h>

#include <binder/IPCThreadState.h>
#include <utils/Errors.h>
#include <utils/Timers.h>
#include <utils/Trace.h>

#include <gui/Surface.h>
#include <gui/SurfaceComposerClient.h>
#include <gui/ISurfaceComposer.h>
#include <ui/DisplayInfo.h>
#include <media/openmax/OMX_IVCommon.h>
#include <media/stagefright/foundation/ABuffer.h>
#include <media/stagefright/foundation/AMessage.h>
#include <media/stagefright/MediaCodec.h>
#include <media/stagefright/MediaErrors.h>
#include <media/stagefright/MediaMuxer.h>
#include <media/ICrypto.h>
#include <media/MediaCodecBuffer.h>

#include "screenrecord.h"
#include "Overlay.h"
#include "FrameOutput.h"

using namespace android;

static const uint32_t kMinBitRate = 100000;         // 0.1Mbps
static const uint32_t kMaxBitRate = 200 * 1000000;  // 200Mbps
static const uint32_t kMaxTimeLimitSec = 180;       // 3 minutes
static const uint32_t kFallbackWidth = 1280;        // 720p
static const uint32_t kFallbackHeight = 720;
static const char* kMimeTypeAvc = "video/avc";

// Command-line parameters.
static bool gVerbose = false;           // chatty on stdout
static bool gRotate = false;            // rotate 90 degrees
static bool gMonotonicTime = false;     // use system monotonic time for timestamps
static enum {
    FORMAT_MP4, FORMAT_H264, FORMAT_FRAMES, FORMAT_RAW_FRAMES
} gOutputFormat = FORMAT_MP4;           // data format for output
static bool gSizeSpecified = false;     // was size explicitly requested?
static bool gWantInfoScreen = false;    // do we want initial info screen?
static bool gWantFrameTime = false;     // do we want times on each frame?
static uint32_t gVideoWidth = 0;        // default width+height
static uint32_t gVideoHeight = 0;
static uint32_t gBitRate = 4000000;     // 4Mbps
static uint32_t gTimeLimitSec = kMaxTimeLimitSec;

// Set by signal handler to stop recording.
static volatile bool gStopRequested = false;

// Previous signal handler state, restored after first hit.
static struct sigaction gOrigSigactionINT;
static struct sigaction gOrigSigactionHUP;


/*
 * Catch keyboard interrupt signals. On receipt, the "stop requested"
 * flag is raised, and the original handler is restored (so that, if
 * we get stuck finishing, a second Ctrl-C will kill the process).
 */
static void signalCatcher(int signum)
{
    gStopRequested = true;
    switch (signum) {
        case SIGINT:
        case SIGHUP:
            sigaction(SIGINT, &gOrigSigactionINT, NULL);
            sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
            break;
        default:
            abort();
            break;
    }
}

/*
 * Configures signal handlers. The previous handlers are saved.
 *
 * If the command is run from an interactive adb shell, we get SIGINT
 * when Ctrl-C is hit. If we're run from the host, the local adb process
 * gets the signal, and we get a SIGHUP when the terminal disconnects.
 */
static status_t configureSignals() {
    struct sigaction act;
    memset(&act, 0, sizeof(act));
    act.sa_handler = signalCatcher;
    if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
        status_t err = -errno;
        fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
                strerror(errno));
        return err;
    }
    if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
        status_t err = -errno;
        fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
                strerror(errno));
        return err;
    }
    return NO_ERROR;
}

/*
 * Returns "true" if the device is rotated 90 degrees.
 */
static bool isDeviceRotated(int orientation) {
    return orientation != DISPLAY_ORIENTATION_0 &&
            orientation != DISPLAY_ORIENTATION_180;
}

/*
 * Configures and starts the MediaCodec encoder. Obtains an input surface
 * from the codec.
 */
static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
        sp<IGraphicBufferProducer>* pBufferProducer) {
    status_t err;

    if (gVerbose) {
        printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
                gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
    }

    sp<AMessage> format = new AMessage;
    format->setInt32("width", gVideoWidth);
    format->setInt32("height", gVideoHeight);
    format->setString("mime", kMimeTypeAvc);
    format->setInt32("color-format", OMX_COLOR_FormatAndroidOpaque);
164 format->setInt32("bitrate", gBitRate);
165 format->setFloat("frame-rate", displayFps);
166 format->setInt32("i-frame-interval", 10);
167
168 sp<ALooper> looper = new ALooper;
169 looper->setName("screenrecord_looper");
170 looper->start();
171 ALOGV("Creating codec");
172 sp<MediaCodec> codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
173 if (codec == NULL) {
174 fprintf(stderr, "ERROR: unable to create %s codec instance\n",
175 kMimeTypeAvc);
176 return UNKNOWN_ERROR;
177 }
178
179 err = codec->configure(format, NULL, NULL,
180 MediaCodec::CONFIGURE_FLAG_ENCODE);
181 if (err != NO_ERROR) {
182 fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
183 kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
184 codec->release();
185 return err;
186 }
187
188 ALOGV("Creating encoder input surface");
189 sp<IGraphicBufferProducer> bufferProducer;
190 err = codec->createInputSurface(&bufferProducer);
191 if (err != NO_ERROR) {
192 fprintf(stderr,
193 "ERROR: unable to create encoder input surface (err=%d)\n", err);
194 codec->release();
195 return err;
196 }
197
198 ALOGV("Starting codec");
199 err = codec->start();
200 if (err != NO_ERROR) {
201 fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
202 codec->release();
203 return err;
204 }
205
206 ALOGV("Codec prepared");
207 *pCodec = codec;
208 *pBufferProducer = bufferProducer;
209 return 0;
210 }

/*
 * Sets the display projection, based on the display dimensions, video size,
 * and device orientation.
 */
static status_t setDisplayProjection(const sp<IBinder>& dpy,
        const DisplayInfo& mainDpyInfo) {

    // Set the region of the layer stack we're interested in, which in our
    // case is "all of it". If the app is rotated (so that the width of the
    // app is based on the height of the display), reverse width/height.
    bool deviceRotated = isDeviceRotated(mainDpyInfo.orientation);
    uint32_t sourceWidth, sourceHeight;
    if (!deviceRotated) {
        sourceWidth = mainDpyInfo.w;
        sourceHeight = mainDpyInfo.h;
    } else {
        ALOGV("using rotated width/height");
        sourceHeight = mainDpyInfo.w;
        sourceWidth = mainDpyInfo.h;
    }
    Rect layerStackRect(sourceWidth, sourceHeight);

    // We need to preserve the aspect ratio of the display.
    float displayAspect = (float) sourceHeight / (float) sourceWidth;


    // Set the way we map the output onto the display surface (which will
    // be e.g. 1280x720 for a 720p video). The rect is interpreted
    // post-rotation, so if the display is rotated 90 degrees we need to
    // "pre-rotate" it by flipping width/height, so that the orientation
    // adjustment changes it back.
    //
    // We might want to encode a portrait display as landscape to use more
    // of the screen real estate. (If players respect a 90-degree rotation
    // hint, we can essentially get a 720x1280 video instead of 1280x720.)
    // In that case, we swap the configured video width/height and then
    // supply a rotation value to the display projection.
    uint32_t videoWidth, videoHeight;
    uint32_t outWidth, outHeight;
    if (!gRotate) {
        videoWidth = gVideoWidth;
        videoHeight = gVideoHeight;
    } else {
        videoWidth = gVideoHeight;
        videoHeight = gVideoWidth;
    }
    if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
        // limited by narrow width; reduce height
        outWidth = videoWidth;
        outHeight = (uint32_t)(videoWidth * displayAspect);
    } else {
        // limited by short height; restrict width
        outHeight = videoHeight;
        outWidth = (uint32_t)(videoHeight / displayAspect);
    }
    uint32_t offX, offY;
    offX = (videoWidth - outWidth) / 2;
    offY = (videoHeight - outHeight) / 2;
    Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
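    // Worked example: capturing a 1080x1920 portrait panel into a 1280x720
    // video without --rotate gives displayAspect ~= 1.78, so the content is
    // scaled to roughly 405x720 and centered at offX ~= 437, pillarboxed to
    // preserve the display's aspect ratio.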

    if (gVerbose) {
        if (gRotate) {
            printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
                    outHeight, outWidth, offY, offX);
        } else {
            printf("Content area is %ux%u at offset x=%d y=%d\n",
                    outWidth, outHeight, offX, offY);
        }
    }

    SurfaceComposerClient::setDisplayProjection(dpy,
            gRotate ? DISPLAY_ORIENTATION_90 : DISPLAY_ORIENTATION_0,
            layerStackRect, displayRect);
    return NO_ERROR;
}

/*
 * Configures the virtual display. When this completes, virtual display
 * frames will start arriving from the buffer producer.
 */
static status_t prepareVirtualDisplay(const DisplayInfo& mainDpyInfo,
        const sp<IGraphicBufferProducer>& bufferProducer,
        sp<IBinder>* pDisplayHandle) {
    sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
            String8("ScreenRecorder"), false /*secure*/);

    SurfaceComposerClient::openGlobalTransaction();
    SurfaceComposerClient::setDisplaySurface(dpy, bufferProducer);
    setDisplayProjection(dpy, mainDpyInfo);
    SurfaceComposerClient::setDisplayLayerStack(dpy, 0);    // default stack
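    // Assumption: layer stack 0 is the stack shown on the main built-in display
    // by default, so attaching the virtual display to it makes SurfaceFlinger
    // composite the same layers into our buffer producer (i.e. mirroring).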
    SurfaceComposerClient::closeGlobalTransaction();

    *pDisplayHandle = dpy;

    return NO_ERROR;
}

/*
 * Runs the MediaCodec encoder, sending the output to the MediaMuxer. The
 * input frames are coming from the virtual display as fast as SurfaceFlinger
 * wants to send them.
 *
 * Exactly one of muxer or rawFp must be non-null.
 *
 * The muxer must *not* have been started before calling.
 */
static status_t runEncoder(const sp<MediaCodec>& encoder,
        const sp<MediaMuxer>& muxer, FILE* rawFp, const sp<IBinder>& mainDpy,
        const sp<IBinder>& virtualDpy, uint8_t orientation) {
    static int kTimeout = 250000;   // be responsive on signal
    status_t err;
    ssize_t trackIdx = -1;
    uint32_t debugNumFrames = 0;
    int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
    int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
    DisplayInfo mainDpyInfo;

    assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));

    Vector<sp<MediaCodecBuffer> > buffers;
    err = encoder->getOutputBuffers(&buffers);
    if (err != NO_ERROR) {
        fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
        return err;
    }

    // Run until we're signaled.
    while (!gStopRequested) {
        size_t bufIndex, offset, size;
        int64_t ptsUsec;
        uint32_t flags;

        if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
            if (gVerbose) {
                printf("Time limit reached\n");
            }
            break;
        }

        ALOGV("Calling dequeueOutputBuffer");
        err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
                &flags, kTimeout);
        ALOGV("dequeueOutputBuffer returned %d", err);
        switch (err) {
            case NO_ERROR:
                // got a buffer
                if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
                    ALOGV("Got codec config buffer (%zu bytes)", size);
                    if (muxer != NULL) {
                        // ignore this -- we passed the CSD into MediaMuxer when
                        // we got the format change notification
                        size = 0;
                    }
                }
                if (size != 0) {
                    ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
                            bufIndex, size, ptsUsec);

                    { // scope
                        ATRACE_NAME("orientation");
                        // Check orientation, update if it has changed.
                        //
                        // Polling for changes is inefficient and wrong, but the
                        // useful stuff is hard to get at without a Dalvik VM.
                        err = SurfaceComposerClient::getDisplayInfo(mainDpy,
                                &mainDpyInfo);
                        if (err != NO_ERROR) {
                            ALOGW("getDisplayInfo(main) failed: %d", err);
                        } else if (orientation != mainDpyInfo.orientation) {
                            ALOGD("orientation changed, now %d", mainDpyInfo.orientation);
                            SurfaceComposerClient::openGlobalTransaction();
                            setDisplayProjection(virtualDpy, mainDpyInfo);
                            SurfaceComposerClient::closeGlobalTransaction();
                            orientation = mainDpyInfo.orientation;
                        }
                    }

                    // If the virtual display isn't providing us with timestamps,
                    // use the current time. This isn't great -- we could get
                    // decoded data in clusters -- but we're not expecting
                    // to hit this anyway.
                    if (ptsUsec == 0) {
                        ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
                    }

                    if (muxer == NULL) {
                        fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
                        // Flush the data immediately in case we're streaming.
                        // We don't want to do this if all we've written is
                        // the SPS/PPS data because mplayer gets confused.
                        if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
                            fflush(rawFp);
                        }
                    } else {
                        // The MediaMuxer docs are unclear, but it appears that we
                        // need to pass either the full set of BufferInfo flags, or
                        // (flags & BUFFER_FLAG_SYNCFRAME).
                        //
                        // If this blocks for too long we could drop frames. We may
                        // want to queue these up and do them on a different thread.
                        ATRACE_NAME("write sample");
                        assert(trackIdx != -1);
                        // TODO
                        sp<ABuffer> buffer = new ABuffer(
                                buffers[bufIndex]->data(), buffers[bufIndex]->size());
                        err = muxer->writeSampleData(buffer, trackIdx,
                                ptsUsec, flags);
                        if (err != NO_ERROR) {
                            fprintf(stderr,
                                    "Failed writing data to muxer (err=%d)\n", err);
                            return err;
                        }
                    }
                    debugNumFrames++;
                }
                err = encoder->releaseOutputBuffer(bufIndex);
                if (err != NO_ERROR) {
                    fprintf(stderr, "Unable to release output buffer (err=%d)\n",
                            err);
                    return err;
                }
                if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
                    // Not expecting EOS from SurfaceFlinger. Go with it.
                    ALOGI("Received end-of-stream");
                    gStopRequested = true;
                }
                break;
            case -EAGAIN:                       // INFO_TRY_AGAIN_LATER
                ALOGV("Got -EAGAIN, looping");
                break;
            case INFO_FORMAT_CHANGED:           // INFO_OUTPUT_FORMAT_CHANGED
                {
                    // Format includes CSD, which we must provide to muxer.
                    ALOGV("Encoder format changed");
                    sp<AMessage> newFormat;
                    encoder->getOutputFormat(&newFormat);
                    if (muxer != NULL) {
                        trackIdx = muxer->addTrack(newFormat);
                        ALOGV("Starting muxer");
                        err = muxer->start();
                        if (err != NO_ERROR) {
                            fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
                            return err;
                        }
                    }
                }
                break;
            case INFO_OUTPUT_BUFFERS_CHANGED:   // INFO_OUTPUT_BUFFERS_CHANGED
                // Not expected for an encoder; handle it anyway.
                ALOGV("Encoder buffers changed");
                err = encoder->getOutputBuffers(&buffers);
                if (err != NO_ERROR) {
                    fprintf(stderr,
                            "Unable to get new output buffers (err=%d)\n", err);
                    return err;
                }
                break;
            case INVALID_OPERATION:
                ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
                return err;
            default:
                fprintf(stderr,
                        "Got weird result %d from dequeueOutputBuffer\n", err);
                return err;
        }
    }

    ALOGV("Encoder stopping (req=%d)", gStopRequested);
    if (gVerbose) {
        printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
                debugNumFrames, nanoseconds_to_seconds(
                        systemTime(CLOCK_MONOTONIC) - startWhenNsec));
    }
    return NO_ERROR;
}

/*
 * Raw H.264 byte stream output requested. Send the output to stdout
 * if desired. If the output is a tty, reconfigure it to avoid the
 * CRLF line termination that we see with "adb shell" commands.
 */
static FILE* prepareRawOutput(const char* fileName) {
    FILE* rawFp = NULL;

    if (strcmp(fileName, "-") == 0) {
        if (gVerbose) {
            fprintf(stderr, "ERROR: verbose output and '-' not compatible");
            return NULL;
        }
        rawFp = stdout;
    } else {
        rawFp = fopen(fileName, "w");
        if (rawFp == NULL) {
            fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
            return NULL;
        }
    }

    int fd = fileno(rawFp);
    if (isatty(fd)) {
        // best effort -- reconfigure tty for "raw"
        ALOGD("raw video output to tty (fd=%d)", fd);
        struct termios term;
        if (tcgetattr(fd, &term) == 0) {
            cfmakeraw(&term);
            if (tcsetattr(fd, TCSANOW, &term) == 0) {
                ALOGD("tty successfully configured for raw");
            }
        }
    }

    return rawFp;
}

/*
 * Main "do work" start point.
 *
 * Configures codec, muxer, and virtual display, then starts moving bits
 * around.
 */
static status_t recordScreen(const char* fileName) {
    status_t err;

    // Configure signal handler.
    err = configureSignals();
    if (err != NO_ERROR) return err;

    // Start Binder thread pool. MediaCodec needs to be able to receive
    // messages from mediaserver.
    sp<ProcessState> self = ProcessState::self();
    self->startThreadPool();

    // Get main display parameters.
    sp<IBinder> mainDpy = SurfaceComposerClient::getBuiltInDisplay(
            ISurfaceComposer::eDisplayIdMain);
    DisplayInfo mainDpyInfo;
    err = SurfaceComposerClient::getDisplayInfo(mainDpy, &mainDpyInfo);
    if (err != NO_ERROR) {
        fprintf(stderr, "ERROR: unable to get display characteristics\n");
        return err;
    }
    if (gVerbose) {
        printf("Main display is %dx%d @%.2ffps (orientation=%u)\n",
                mainDpyInfo.w, mainDpyInfo.h, mainDpyInfo.fps,
                mainDpyInfo.orientation);
    }

    bool rotated = isDeviceRotated(mainDpyInfo.orientation);
    if (gVideoWidth == 0) {
        gVideoWidth = rotated ? mainDpyInfo.h : mainDpyInfo.w;
    }
    if (gVideoHeight == 0) {
        gVideoHeight = rotated ? mainDpyInfo.w : mainDpyInfo.h;
    }

    // Configure and start the encoder.
    sp<MediaCodec> encoder;
    sp<FrameOutput> frameOutput;
    sp<IGraphicBufferProducer> encoderInputSurface;
    if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
        err = prepareEncoder(mainDpyInfo.fps, &encoder, &encoderInputSurface);

        if (err != NO_ERROR && !gSizeSpecified) {
            // fallback is defined for landscape; swap if we're in portrait
            bool needSwap = gVideoWidth < gVideoHeight;
            uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
            uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
            if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
                ALOGV("Retrying with 720p");
                fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
                        gVideoWidth, gVideoHeight, newWidth, newHeight);
                gVideoWidth = newWidth;
                gVideoHeight = newHeight;
                err = prepareEncoder(mainDpyInfo.fps, &encoder,
                        &encoderInputSurface);
            }
        }
        if (err != NO_ERROR) return err;

        // From here on, we must explicitly release() the encoder before it goes
        // out of scope, or we will get an assertion failure from stagefright
        // later on in a different thread.
    } else {
        // We're not using an encoder at all. The "encoder input surface" we hand to
        // SurfaceFlinger will just feed directly to us.
        frameOutput = new FrameOutput();
        err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
        if (err != NO_ERROR) {
            return err;
        }
    }

    // Draw the "info" page by rendering a frame with GLES and sending
    // it directly to the encoder.
    // TODO: consider displaying this as a regular layer to avoid b/11697754
    if (gWantInfoScreen) {
        Overlay::drawInfoPage(encoderInputSurface);
    }

    // Configure optional overlay.
    sp<IGraphicBufferProducer> bufferProducer;
    sp<Overlay> overlay;
    if (gWantFrameTime) {
        // Send virtual display frames to an external texture.
        overlay = new Overlay(gMonotonicTime);
        err = overlay->start(encoderInputSurface, &bufferProducer);
        if (err != NO_ERROR) {
            if (encoder != NULL) encoder->release();
            return err;
        }
        if (gVerbose) {
            printf("Bugreport overlay created\n");
        }
    } else {
        // Use the encoder's input surface as the virtual display surface.
        bufferProducer = encoderInputSurface;
    }

    // Configure virtual display.
    sp<IBinder> dpy;
    err = prepareVirtualDisplay(mainDpyInfo, bufferProducer, &dpy);
    if (err != NO_ERROR) {
        if (encoder != NULL) encoder->release();
        return err;
    }

    sp<MediaMuxer> muxer = NULL;
    FILE* rawFp = NULL;
    switch (gOutputFormat) {
        case FORMAT_MP4: {
            // Configure muxer. We have to wait for the CSD blob from the encoder
            // before we can start it.
            err = unlink(fileName);
            if (err != 0 && errno != ENOENT) {
                fprintf(stderr, "ERROR: couldn't remove existing file\n");
                abort();
            }
            int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
            if (fd < 0) {
                fprintf(stderr, "ERROR: couldn't open file\n");
                abort();
            }
            muxer = new MediaMuxer(fd, MediaMuxer::OUTPUT_FORMAT_MPEG_4);
            close(fd);
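            // Assumption: the muxer (via MPEG4Writer) keeps its own duplicate of
            // the file descriptor, so our copy can be closed immediately here.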
            if (gRotate) {
                muxer->setOrientationHint(90);  // TODO: does this do anything?
            }
            break;
        }
        case FORMAT_H264:
        case FORMAT_FRAMES:
        case FORMAT_RAW_FRAMES: {
            rawFp = prepareRawOutput(fileName);
            if (rawFp == NULL) {
                if (encoder != NULL) encoder->release();
                return -1;
            }
            break;
        }
        default:
            fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
            abort();
    }

    if (gOutputFormat == FORMAT_FRAMES || gOutputFormat == FORMAT_RAW_FRAMES) {
        // TODO: if we want to make this a proper feature, we should output
        // an outer header with version info. Right now we never change
        // the frame size or format, so we could conceivably just send
        // the current frame header once and then follow it with an
        // unbroken stream of data.

        // Make the EGL context current again. This gets unhooked if we're
        // using "--bugreport" mode.
        // TODO: figure out if we can eliminate this
        frameOutput->prepareToCopy();

        while (!gStopRequested) {
            // Poll for frames, the same way we do for MediaCodec. We do
            // all of the work on the main thread.
            //
            // Ideally we'd sleep indefinitely and wake when the
            // stop was requested, but this will do for now. (It almost
            // works because wait() wakes when a signal hits, but we
            // need to handle the edge cases.)
            bool rawFrames = gOutputFormat == FORMAT_RAW_FRAMES;
            err = frameOutput->copyFrame(rawFp, 250000, rawFrames);
            if (err == ETIMEDOUT) {
                err = NO_ERROR;
            } else if (err != NO_ERROR) {
                ALOGE("Got error %d from copyFrame()", err);
                break;
            }
        }
    } else {
        // Main encoder loop.
        err = runEncoder(encoder, muxer, rawFp, mainDpy, dpy,
                mainDpyInfo.orientation);
        if (err != NO_ERROR) {
            fprintf(stderr, "Encoder failed (err=%d)\n", err);
            // fall through to cleanup
        }

        if (gVerbose) {
            printf("Stopping encoder and muxer\n");
        }
    }

    // Shut everything down, starting with the producer side.
    encoderInputSurface = NULL;
    SurfaceComposerClient::destroyDisplay(dpy);
    if (overlay != NULL) overlay->stop();
    if (encoder != NULL) encoder->stop();
    if (muxer != NULL) {
        // If we don't stop muxer explicitly, i.e. let the destructor run,
        // it may hang (b/11050628).
        err = muxer->stop();
    } else if (rawFp != stdout) {
        fclose(rawFp);
    }
    if (encoder != NULL) encoder->release();

    return err;
}

/*
 * Sends a broadcast to the media scanner to tell it about the new video.
 *
 * This is optional, but nice to have.
 */
static status_t notifyMediaScanner(const char* fileName) {
    // need to do allocations before the fork()
    String8 fileUrl("file://");
    fileUrl.append(fileName);

    const char* kCommand = "/system/bin/am";
    const char* const argv[] = {
            kCommand,
            "broadcast",
            "-a",
            "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
            "-d",
            fileUrl.string(),
            NULL
    };
    if (gVerbose) {
        printf("Executing:");
        for (int i = 0; argv[i] != NULL; i++) {
            printf(" %s", argv[i]);
        }
        putchar('\n');
    }

    pid_t pid = fork();
    if (pid < 0) {
        int err = errno;
        ALOGW("fork() failed: %s", strerror(err));
        return -err;
    } else if (pid > 0) {
        // parent; wait for the child, mostly to make the verbose-mode output
        // look right, but also to check for and log failures
        int status;
        pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
        if (actualPid != pid) {
            ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
        } else if (status != 0) {
            ALOGW("'am broadcast' exited with status=%d", status);
        } else {
            ALOGV("'am broadcast' exited successfully");
        }
    } else {
        if (!gVerbose) {
            // non-verbose, suppress 'am' output
            ALOGV("closing stdout/stderr in child");
            int fd = open("/dev/null", O_WRONLY);
            if (fd >= 0) {
                dup2(fd, STDOUT_FILENO);
                dup2(fd, STDERR_FILENO);
                close(fd);
            }
        }
        execv(kCommand, const_cast<char* const*>(argv));
        ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
        exit(1);
    }
    return NO_ERROR;
}

/*
 * Parses a string of the form "1280x720".
 *
 * Returns true on success.
 */
static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
        uint32_t* pHeight) {
    long width, height;
    char* end;

    // Must specify base 10, or "0x0" gets parsed differently.
    width = strtol(widthHeight, &end, 10);
    if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
        // invalid chars in width, or missing 'x', or missing height
        return false;
    }
    height = strtol(end + 1, &end, 10);
    if (*end != '\0') {
        // invalid chars in height
        return false;
    }

    *pWidth = width;
    *pHeight = height;
    return true;
}

/*
 * Accepts a string with a bare number ("4000000") or with a single-character
 * unit ("4m").
 *
 * Returns an error if parsing fails.
 */
static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
    long value;
    char* endptr;

    value = strtol(str, &endptr, 10);
    if (*endptr == '\0') {
        // bare number
        *pValue = value;
        return NO_ERROR;
    } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
        *pValue = value * 1000000;  // check for overflow?
        return NO_ERROR;
    } else {
        fprintf(stderr, "Unrecognized value: %s\n", str);
        return UNKNOWN_ERROR;
    }
}

/*
 * Dumps usage on stderr.
 */
static void usage() {
    fprintf(stderr,
        "Usage: screenrecord [options] <filename>\n"
        "\n"
        "Android screenrecord v%d.%d.  Records the device's display to a .mp4 file.\n"
        "\n"
        "Options:\n"
        "--size WIDTHxHEIGHT\n"
        "    Set the video size, e.g. \"1280x720\".  Default is the device's main\n"
        "    display resolution (if supported), 1280x720 if not.  For best results,\n"
        "    use a size supported by the AVC encoder.\n"
        "--bit-rate RATE\n"
        "    Set the video bit rate, in bits per second.  Value may be specified as\n"
        "    bits or megabits, e.g. '4000000' is equivalent to '4M'.  Default %dMbps.\n"
        "--bugreport\n"
        "    Add additional information, such as a timestamp overlay, that is helpful\n"
        "    in videos captured to illustrate bugs.\n"
        "--time-limit TIME\n"
        "    Set the maximum recording time, in seconds.  Default / maximum is %d.\n"
        "--verbose\n"
        "    Display interesting information on stdout.\n"
        "--help\n"
        "    Show this message.\n"
        "\n"
        "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
        "\n",
        kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
        );
}

/*
 * Parses args and kicks things off.
 */
int main(int argc, char* const argv[]) {
    static const struct option longOptions[] = {
        { "help",               no_argument,        NULL, 'h' },
        { "verbose",            no_argument,        NULL, 'v' },
        { "size",               required_argument,  NULL, 's' },
        { "bit-rate",           required_argument,  NULL, 'b' },
        { "time-limit",         required_argument,  NULL, 't' },
        { "bugreport",          no_argument,        NULL, 'u' },
        // "unofficial" options
        { "show-device-info",   no_argument,        NULL, 'i' },
        { "show-frame-time",    no_argument,        NULL, 'f' },
        { "rotate",             no_argument,        NULL, 'r' },
        { "output-format",      required_argument,  NULL, 'o' },
        { "monotonic-time",     no_argument,        NULL, 'm' },
        { NULL,                 0,                  NULL, 0 }
    };

    while (true) {
        int optionIndex = 0;
        int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
        if (ic == -1) {
            break;
        }

        switch (ic) {
            case 'h':
                usage();
                return 0;
            case 'v':
                gVerbose = true;
                break;
            case 's':
                if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
                    fprintf(stderr, "Invalid size '%s', must be width x height\n",
                            optarg);
                    return 2;
                }
                if (gVideoWidth == 0 || gVideoHeight == 0) {
                    fprintf(stderr,
                            "Invalid size %ux%u, width and height may not be zero\n",
                            gVideoWidth, gVideoHeight);
                    return 2;
                }
                gSizeSpecified = true;
                break;
            case 'b':
                if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
                    return 2;
                }
                if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
                    fprintf(stderr,
                            "Bit rate %dbps outside acceptable range [%d,%d]\n",
                            gBitRate, kMinBitRate, kMaxBitRate);
                    return 2;
                }
                break;
            case 't':
                gTimeLimitSec = atoi(optarg);
                if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
                    fprintf(stderr,
                            "Time limit %ds outside acceptable range [1,%d]\n",
                            gTimeLimitSec, kMaxTimeLimitSec);
                    return 2;
                }
                break;
            case 'u':
                gWantInfoScreen = true;
                gWantFrameTime = true;
                break;
            case 'i':
                gWantInfoScreen = true;
                break;
            case 'f':
                gWantFrameTime = true;
                break;
            case 'r':
                // experimental feature
                gRotate = true;
                break;
            case 'o':
                if (strcmp(optarg, "mp4") == 0) {
                    gOutputFormat = FORMAT_MP4;
                } else if (strcmp(optarg, "h264") == 0) {
                    gOutputFormat = FORMAT_H264;
                } else if (strcmp(optarg, "frames") == 0) {
                    gOutputFormat = FORMAT_FRAMES;
                } else if (strcmp(optarg, "raw-frames") == 0) {
                    gOutputFormat = FORMAT_RAW_FRAMES;
                } else {
                    fprintf(stderr, "Unknown format '%s'\n", optarg);
                    return 2;
                }
                break;
            case 'm':
                gMonotonicTime = true;
                break;
            default:
                if (ic != '?') {
                    fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
                }
                return 2;
        }
    }

    if (optind != argc - 1) {
        fprintf(stderr, "Must specify output file (see --help).\n");
        return 2;
    }

    const char* fileName = argv[optind];
    if (gOutputFormat == FORMAT_MP4) {
        // MediaMuxer tries to create the file in the constructor, but we don't
        // learn about the failure until muxer.start(), which returns a generic
        // error code without logging anything.  We attempt to create the file
        // now for better diagnostics.
        int fd = open(fileName, O_CREAT | O_RDWR, 0644);
        if (fd < 0) {
            fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
            return 1;
        }
        close(fd);
    }

    status_t err = recordScreen(fileName);
    if (err == NO_ERROR) {
        // Try to notify the media scanner.  Not fatal if this fails.
        notifyMediaScanner(fileName);
    }
    ALOGD(err == NO_ERROR ? "success" : "failed");
    return (int) err;
}