1 /*
2 * Copyright 2013 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16
17 #include <assert.h>
18 #include <ctype.h>
19 #include <fcntl.h>
20 #include <inttypes.h>
21 #include <getopt.h>
22 #include <signal.h>
23 #include <stdio.h>
24 #include <stdlib.h>
25 #include <string.h>
26 #include <sys/stat.h>
27 #include <sys/types.h>
28 #include <sys/wait.h>
29
30 #include <termios.h>
31 #include <unistd.h>
32
33 #define LOG_TAG "ScreenRecord"
34 #define ATRACE_TAG ATRACE_TAG_GRAPHICS
35 //#define LOG_NDEBUG 0
36 #include <utils/Log.h>
37
38 #include <binder/IPCThreadState.h>
39 #include <utils/Errors.h>
40 #include <utils/SystemClock.h>
41 #include <utils/Timers.h>
42 #include <utils/Trace.h>
43
44 #include <gui/ISurfaceComposer.h>
45 #include <gui/Surface.h>
46 #include <gui/SurfaceComposerClient.h>
47 #include <gui/ISurfaceComposer.h>
48 #include <media/MediaCodecBuffer.h>
49 #include <media/NdkMediaCodec.h>
50 #include <media/NdkMediaFormatPriv.h>
51 #include <media/NdkMediaMuxer.h>
52 #include <media/openmax/OMX_IVCommon.h>
53 #include <media/stagefright/MediaCodec.h>
54 #include <media/stagefright/MediaCodecConstants.h>
55 #include <media/stagefright/MediaErrors.h>
56 #include <media/stagefright/PersistentSurface.h>
57 #include <media/stagefright/foundation/ABuffer.h>
58 #include <media/stagefright/foundation/AMessage.h>
59 #include <mediadrm/ICrypto.h>
60 #include <ui/DisplayConfig.h>
61 #include <ui/DisplayState.h>
62
63 #include "screenrecord.h"
64 #include "Overlay.h"
65 #include "FrameOutput.h"
66
67 using android::ABuffer;
68 using android::ALooper;
69 using android::AMessage;
70 using android::AString;
71 using android::DisplayConfig;
72 using android::FrameOutput;
73 using android::IBinder;
74 using android::IGraphicBufferProducer;
75 using android::ISurfaceComposer;
76 using android::MediaCodec;
77 using android::MediaCodecBuffer;
78 using android::Overlay;
79 using android::PersistentSurface;
80 using android::PhysicalDisplayId;
81 using android::ProcessState;
82 using android::Rect;
83 using android::String8;
84 using android::SurfaceComposerClient;
85 using android::Vector;
86 using android::sp;
87 using android::status_t;
88
89 using android::INVALID_OPERATION;
90 using android::NAME_NOT_FOUND;
91 using android::NO_ERROR;
92 using android::UNKNOWN_ERROR;
93
94 namespace ui = android::ui;
95
96 static const uint32_t kMinBitRate = 100000; // 0.1Mbps
97 static const uint32_t kMaxBitRate = 200 * 1000000; // 200Mbps
98 static const uint32_t kMaxTimeLimitSec = 180; // 3 minutes
99 static const uint32_t kFallbackWidth = 1280; // 720p
100 static const uint32_t kFallbackHeight = 720;
101 static const char* kMimeTypeAvc = "video/avc";
102 static const char* kMimeTypeApplicationOctetstream = "application/octet-stream";
103 static const char* kWinscopeMagicString = "#VV1NSC0PET1ME!#";
104
105 // Command-line parameters.
106 static bool gVerbose = false; // chatty on stdout
107 static bool gRotate = false; // rotate 90 degrees
108 static bool gMonotonicTime = false; // use system monotonic time for timestamps
109 static bool gPersistentSurface = false; // use persistent surface
110 static enum {
111 FORMAT_MP4, FORMAT_H264, FORMAT_WEBM, FORMAT_3GPP, FORMAT_FRAMES, FORMAT_RAW_FRAMES
112 } gOutputFormat = FORMAT_MP4; // data format for output
113 static AString gCodecName = ""; // codec name override
114 static bool gSizeSpecified = false; // was size explicitly requested?
115 static bool gWantInfoScreen = false; // do we want initial info screen?
116 static bool gWantFrameTime = false; // do we want times on each frame?
117 static uint32_t gVideoWidth = 0; // default width+height
118 static uint32_t gVideoHeight = 0;
119 static uint32_t gBitRate = 20000000; // 20Mbps
120 static uint32_t gTimeLimitSec = kMaxTimeLimitSec;
121 static uint32_t gBframes = 0;
122 static PhysicalDisplayId gPhysicalDisplayId;
123 // Set by signal handler to stop recording.
124 static volatile bool gStopRequested = false;
125
126 // Previous signal handler state, restored after first hit.
127 static struct sigaction gOrigSigactionINT;
128 static struct sigaction gOrigSigactionHUP;
129
130
131 /*
132 * Catch keyboard interrupt signals. On receipt, the "stop requested"
133 * flag is raised, and the original handler is restored (so that, if
134 * we get stuck finishing, a second Ctrl-C will kill the process).
135 */
signalCatcher(int signum)136 static void signalCatcher(int signum)
137 {
138 gStopRequested = true;
139 switch (signum) {
140 case SIGINT:
141 case SIGHUP:
142 sigaction(SIGINT, &gOrigSigactionINT, NULL);
143 sigaction(SIGHUP, &gOrigSigactionHUP, NULL);
144 break;
145 default:
146 abort();
147 break;
148 }
149 }
150
151 /*
152 * Configures signal handlers. The previous handlers are saved.
153 *
154 * If the command is run from an interactive adb shell, we get SIGINT
155 * when Ctrl-C is hit. If we're run from the host, the local adb process
156 * gets the signal, and we get a SIGHUP when the terminal disconnects.
157 */
configureSignals()158 static status_t configureSignals() {
159 struct sigaction act;
160 memset(&act, 0, sizeof(act));
161 act.sa_handler = signalCatcher;
162 if (sigaction(SIGINT, &act, &gOrigSigactionINT) != 0) {
163 status_t err = -errno;
164 fprintf(stderr, "Unable to configure SIGINT handler: %s\n",
165 strerror(errno));
166 return err;
167 }
168 if (sigaction(SIGHUP, &act, &gOrigSigactionHUP) != 0) {
169 status_t err = -errno;
170 fprintf(stderr, "Unable to configure SIGHUP handler: %s\n",
171 strerror(errno));
172 return err;
173 }
174 signal(SIGPIPE, SIG_IGN);
175 return NO_ERROR;
176 }
177
178 /*
179 * Configures and starts the MediaCodec encoder. Obtains an input surface
180 * from the codec.
181 */
prepareEncoder(float displayFps,sp<MediaCodec> * pCodec,sp<IGraphicBufferProducer> * pBufferProducer)182 static status_t prepareEncoder(float displayFps, sp<MediaCodec>* pCodec,
183 sp<IGraphicBufferProducer>* pBufferProducer) {
184 status_t err;
185
186 if (gVerbose) {
187 printf("Configuring recorder for %dx%d %s at %.2fMbps\n",
188 gVideoWidth, gVideoHeight, kMimeTypeAvc, gBitRate / 1000000.0);
189 fflush(stdout);
190 }
191
192 sp<AMessage> format = new AMessage;
193 format->setInt32(KEY_WIDTH, gVideoWidth);
194 format->setInt32(KEY_HEIGHT, gVideoHeight);
195 format->setString(KEY_MIME, kMimeTypeAvc);
196 format->setInt32(KEY_COLOR_FORMAT, OMX_COLOR_FormatAndroidOpaque);
197 format->setInt32(KEY_BIT_RATE, gBitRate);
198 format->setFloat(KEY_FRAME_RATE, displayFps);
199 format->setInt32(KEY_I_FRAME_INTERVAL, 10);
200 format->setInt32(KEY_MAX_B_FRAMES, gBframes);
201 if (gBframes > 0) {
202 format->setInt32(KEY_PROFILE, AVCProfileMain);
203 format->setInt32(KEY_LEVEL, AVCLevel41);
204 }
205
206 sp<android::ALooper> looper = new android::ALooper;
207 looper->setName("screenrecord_looper");
208 looper->start();
209 ALOGV("Creating codec");
210 sp<MediaCodec> codec;
211 if (gCodecName.empty()) {
212 codec = MediaCodec::CreateByType(looper, kMimeTypeAvc, true);
213 if (codec == NULL) {
214 fprintf(stderr, "ERROR: unable to create %s codec instance\n",
215 kMimeTypeAvc);
216 return UNKNOWN_ERROR;
217 }
218 } else {
219 codec = MediaCodec::CreateByComponentName(looper, gCodecName);
220 if (codec == NULL) {
221 fprintf(stderr, "ERROR: unable to create %s codec instance\n",
222 gCodecName.c_str());
223 return UNKNOWN_ERROR;
224 }
225 }
226
227 err = codec->configure(format, NULL, NULL,
228 MediaCodec::CONFIGURE_FLAG_ENCODE);
229 if (err != NO_ERROR) {
230 fprintf(stderr, "ERROR: unable to configure %s codec at %dx%d (err=%d)\n",
231 kMimeTypeAvc, gVideoWidth, gVideoHeight, err);
232 codec->release();
233 return err;
234 }
235
236 ALOGV("Creating encoder input surface");
237 sp<IGraphicBufferProducer> bufferProducer;
238 if (gPersistentSurface) {
239 sp<PersistentSurface> surface = MediaCodec::CreatePersistentInputSurface();
240 bufferProducer = surface->getBufferProducer();
241 err = codec->setInputSurface(surface);
242 } else {
243 err = codec->createInputSurface(&bufferProducer);
244 }
245 if (err != NO_ERROR) {
246 fprintf(stderr,
247 "ERROR: unable to %s encoder input surface (err=%d)\n",
248 gPersistentSurface ? "set" : "create",
249 err);
250 codec->release();
251 return err;
252 }
253
254 ALOGV("Starting codec");
255 err = codec->start();
256 if (err != NO_ERROR) {
257 fprintf(stderr, "ERROR: unable to start codec (err=%d)\n", err);
258 codec->release();
259 return err;
260 }
261
262 ALOGV("Codec prepared");
263 *pCodec = codec;
264 *pBufferProducer = bufferProducer;
265 return 0;
266 }
267
268 /*
269 * Sets the display projection, based on the display dimensions, video size,
270 * and device orientation.
271 */
setDisplayProjection(SurfaceComposerClient::Transaction & t,const sp<IBinder> & dpy,const ui::DisplayState & displayState)272 static status_t setDisplayProjection(
273 SurfaceComposerClient::Transaction& t,
274 const sp<IBinder>& dpy,
275 const ui::DisplayState& displayState) {
276 const ui::Size& viewport = displayState.viewport;
277
278 // Set the region of the layer stack we're interested in, which in our
279 // case is "all of it".
280 Rect layerStackRect(viewport);
281
282 // We need to preserve the aspect ratio of the display.
283 float displayAspect = viewport.getHeight() / static_cast<float>(viewport.getWidth());
284
285
286 // Set the way we map the output onto the display surface (which will
287 // be e.g. 1280x720 for a 720p video). The rect is interpreted
288 // post-rotation, so if the display is rotated 90 degrees we need to
289 // "pre-rotate" it by flipping width/height, so that the orientation
290 // adjustment changes it back.
291 //
292 // We might want to encode a portrait display as landscape to use more
293 // of the screen real estate. (If players respect a 90-degree rotation
294 // hint, we can essentially get a 720x1280 video instead of 1280x720.)
295 // In that case, we swap the configured video width/height and then
296 // supply a rotation value to the display projection.
297 uint32_t videoWidth, videoHeight;
298 uint32_t outWidth, outHeight;
299 if (!gRotate) {
300 videoWidth = gVideoWidth;
301 videoHeight = gVideoHeight;
302 } else {
303 videoWidth = gVideoHeight;
304 videoHeight = gVideoWidth;
305 }
306 if (videoHeight > (uint32_t)(videoWidth * displayAspect)) {
307 // limited by narrow width; reduce height
308 outWidth = videoWidth;
309 outHeight = (uint32_t)(videoWidth * displayAspect);
310 } else {
311 // limited by short height; restrict width
312 outHeight = videoHeight;
313 outWidth = (uint32_t)(videoHeight / displayAspect);
314 }
315 uint32_t offX, offY;
316 offX = (videoWidth - outWidth) / 2;
317 offY = (videoHeight - outHeight) / 2;
318 Rect displayRect(offX, offY, offX + outWidth, offY + outHeight);
319
320 if (gVerbose) {
321 if (gRotate) {
322 printf("Rotated content area is %ux%u at offset x=%d y=%d\n",
323 outHeight, outWidth, offY, offX);
324 fflush(stdout);
325 } else {
326 printf("Content area is %ux%u at offset x=%d y=%d\n",
327 outWidth, outHeight, offX, offY);
328 fflush(stdout);
329 }
330 }
331
332 t.setDisplayProjection(dpy,
333 gRotate ? ui::ROTATION_90 : ui::ROTATION_0,
334 layerStackRect, displayRect);
335 return NO_ERROR;
336 }
337
338 /*
339 * Configures the virtual display. When this completes, virtual display
340 * frames will start arriving from the buffer producer.
341 */
prepareVirtualDisplay(const ui::DisplayState & displayState,const sp<IGraphicBufferProducer> & bufferProducer,sp<IBinder> * pDisplayHandle)342 static status_t prepareVirtualDisplay(
343 const ui::DisplayState& displayState,
344 const sp<IGraphicBufferProducer>& bufferProducer,
345 sp<IBinder>* pDisplayHandle) {
346 sp<IBinder> dpy = SurfaceComposerClient::createDisplay(
347 String8("ScreenRecorder"), false /*secure*/);
348 SurfaceComposerClient::Transaction t;
349 t.setDisplaySurface(dpy, bufferProducer);
350 setDisplayProjection(t, dpy, displayState);
351 t.setDisplayLayerStack(dpy, displayState.layerStack);
352 t.apply();
353
354 *pDisplayHandle = dpy;
355
356 return NO_ERROR;
357 }
358
359 /*
360 * Writes an unsigned integer byte-by-byte in little endian order regardless
361 * of the platform endianness.
362 */
363 template <typename UINT>
writeValueLE(UINT value,uint8_t * buffer)364 static void writeValueLE(UINT value, uint8_t* buffer) {
365 for (int i = 0; i < sizeof(UINT); ++i) {
366 buffer[i] = static_cast<uint8_t>(value);
367 value >>= 8;
368 }
369 }
370
371 /*
372 * Saves frames presentation time relative to the elapsed realtime clock in microseconds
373 * preceded by a Winscope magic string and frame count to a metadata track.
374 * This metadata is used by the Winscope tool to sync video with SurfaceFlinger
375 * and WindowManager traces.
376 *
377 * The metadata is written as a binary array as follows:
378 * - winscope magic string (kWinscopeMagicString constant), without trailing null char,
379 * - the number of recorded frames (as little endian uint32),
380 * - for every frame its presentation time relative to the elapsed realtime clock in microseconds
381 * (as little endian uint64).
382 */
writeWinscopeMetadata(const Vector<int64_t> & timestamps,const ssize_t metaTrackIdx,AMediaMuxer * muxer)383 static status_t writeWinscopeMetadata(const Vector<int64_t>& timestamps,
384 const ssize_t metaTrackIdx, AMediaMuxer *muxer) {
385 ALOGV("Writing metadata");
386 int64_t systemTimeToElapsedTimeOffsetMicros = (android::elapsedRealtimeNano()
387 - systemTime(SYSTEM_TIME_MONOTONIC)) / 1000;
388 sp<ABuffer> buffer = new ABuffer(timestamps.size() * sizeof(int64_t)
389 + sizeof(uint32_t) + strlen(kWinscopeMagicString));
390 uint8_t* pos = buffer->data();
391 strcpy(reinterpret_cast<char*>(pos), kWinscopeMagicString);
392 pos += strlen(kWinscopeMagicString);
393 writeValueLE<uint32_t>(timestamps.size(), pos);
394 pos += sizeof(uint32_t);
395 for (size_t idx = 0; idx < timestamps.size(); ++idx) {
396 writeValueLE<uint64_t>(static_cast<uint64_t>(timestamps[idx]
397 + systemTimeToElapsedTimeOffsetMicros), pos);
398 pos += sizeof(uint64_t);
399 }
400 AMediaCodecBufferInfo bufferInfo = {
401 0,
402 static_cast<int32_t>(buffer->size()),
403 timestamps[0],
404 0
405 };
406 return AMediaMuxer_writeSampleData(muxer, metaTrackIdx, buffer->data(), &bufferInfo);
407 }
408
409 /*
410 * Runs the MediaCodec encoder, sending the output to the MediaMuxer. The
411 * input frames are coming from the virtual display as fast as SurfaceFlinger
412 * wants to send them.
413 *
414 * Exactly one of muxer or rawFp must be non-null.
415 *
416 * The muxer must *not* have been started before calling.
417 */
runEncoder(const sp<MediaCodec> & encoder,AMediaMuxer * muxer,FILE * rawFp,const sp<IBinder> & display,const sp<IBinder> & virtualDpy,ui::Rotation orientation)418 static status_t runEncoder(const sp<MediaCodec>& encoder,
419 AMediaMuxer *muxer, FILE* rawFp, const sp<IBinder>& display,
420 const sp<IBinder>& virtualDpy, ui::Rotation orientation) {
421 static int kTimeout = 250000; // be responsive on signal
422 status_t err;
423 ssize_t trackIdx = -1;
424 ssize_t metaTrackIdx = -1;
425 uint32_t debugNumFrames = 0;
426 int64_t startWhenNsec = systemTime(CLOCK_MONOTONIC);
427 int64_t endWhenNsec = startWhenNsec + seconds_to_nanoseconds(gTimeLimitSec);
428 Vector<int64_t> timestamps;
429 bool firstFrame = true;
430
431 assert((rawFp == NULL && muxer != NULL) || (rawFp != NULL && muxer == NULL));
432
433 Vector<sp<MediaCodecBuffer> > buffers;
434 err = encoder->getOutputBuffers(&buffers);
435 if (err != NO_ERROR) {
436 fprintf(stderr, "Unable to get output buffers (err=%d)\n", err);
437 return err;
438 }
439
440 // Run until we're signaled.
441 while (!gStopRequested) {
442 size_t bufIndex, offset, size;
443 int64_t ptsUsec;
444 uint32_t flags;
445
446 if (firstFrame) {
447 ATRACE_NAME("first_frame");
448 firstFrame = false;
449 }
450
451 if (systemTime(CLOCK_MONOTONIC) > endWhenNsec) {
452 if (gVerbose) {
453 printf("Time limit reached\n");
454 fflush(stdout);
455 }
456 break;
457 }
458
459 ALOGV("Calling dequeueOutputBuffer");
460 err = encoder->dequeueOutputBuffer(&bufIndex, &offset, &size, &ptsUsec,
461 &flags, kTimeout);
462 ALOGV("dequeueOutputBuffer returned %d", err);
463 switch (err) {
464 case NO_ERROR:
465 // got a buffer
466 if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) != 0) {
467 ALOGV("Got codec config buffer (%zu bytes)", size);
468 if (muxer != NULL) {
469 // ignore this -- we passed the CSD into MediaMuxer when
470 // we got the format change notification
471 size = 0;
472 }
473 }
474 if (size != 0) {
475 ALOGV("Got data in buffer %zu, size=%zu, pts=%" PRId64,
476 bufIndex, size, ptsUsec);
477
478 { // scope
479 ATRACE_NAME("orientation");
480 // Check orientation, update if it has changed.
481 //
482 // Polling for changes is inefficient and wrong, but the
483 // useful stuff is hard to get at without a Dalvik VM.
484 ui::DisplayState displayState;
485 err = SurfaceComposerClient::getDisplayState(display, &displayState);
486 if (err != NO_ERROR) {
487 ALOGW("getDisplayState() failed: %d", err);
488 } else if (orientation != displayState.orientation) {
489 ALOGD("orientation changed, now %s", toCString(displayState.orientation));
490 SurfaceComposerClient::Transaction t;
491 setDisplayProjection(t, virtualDpy, displayState);
492 t.apply();
493 orientation = displayState.orientation;
494 }
495 }
496
497 // If the virtual display isn't providing us with timestamps,
498 // use the current time. This isn't great -- we could get
499 // decoded data in clusters -- but we're not expecting
500 // to hit this anyway.
501 if (ptsUsec == 0) {
502 ptsUsec = systemTime(SYSTEM_TIME_MONOTONIC) / 1000;
503 }
504
505 if (muxer == NULL) {
506 fwrite(buffers[bufIndex]->data(), 1, size, rawFp);
507 // Flush the data immediately in case we're streaming.
508 // We don't want to do this if all we've written is
509 // the SPS/PPS data because mplayer gets confused.
510 if ((flags & MediaCodec::BUFFER_FLAG_CODECCONFIG) == 0) {
511 fflush(rawFp);
512 }
513 } else {
514 // The MediaMuxer docs are unclear, but it appears that we
515 // need to pass either the full set of BufferInfo flags, or
516 // (flags & BUFFER_FLAG_SYNCFRAME).
517 //
518 // If this blocks for too long we could drop frames. We may
519 // want to queue these up and do them on a different thread.
520 ATRACE_NAME("write sample");
521 assert(trackIdx != -1);
522 // TODO
523 sp<ABuffer> buffer = new ABuffer(
524 buffers[bufIndex]->data(), buffers[bufIndex]->size());
525 AMediaCodecBufferInfo bufferInfo = {
526 0,
527 static_cast<int32_t>(buffer->size()),
528 ptsUsec,
529 flags
530 };
531 err = AMediaMuxer_writeSampleData(muxer, trackIdx, buffer->data(), &bufferInfo);
532 if (err != NO_ERROR) {
533 fprintf(stderr,
534 "Failed writing data to muxer (err=%d)\n", err);
535 return err;
536 }
537 if (gOutputFormat == FORMAT_MP4) {
538 timestamps.add(ptsUsec);
539 }
540 }
541 debugNumFrames++;
542 }
543 err = encoder->releaseOutputBuffer(bufIndex);
544 if (err != NO_ERROR) {
545 fprintf(stderr, "Unable to release output buffer (err=%d)\n",
546 err);
547 return err;
548 }
549 if ((flags & MediaCodec::BUFFER_FLAG_EOS) != 0) {
550 // Not expecting EOS from SurfaceFlinger. Go with it.
551 ALOGI("Received end-of-stream");
552 gStopRequested = true;
553 }
554 break;
555 case -EAGAIN: // INFO_TRY_AGAIN_LATER
556 ALOGV("Got -EAGAIN, looping");
557 break;
558 case android::INFO_FORMAT_CHANGED: // INFO_OUTPUT_FORMAT_CHANGED
559 {
560 // Format includes CSD, which we must provide to muxer.
561 ALOGV("Encoder format changed");
562 sp<AMessage> newFormat;
563 encoder->getOutputFormat(&newFormat);
564 // TODO remove when MediaCodec has been replaced with AMediaCodec
565 AMediaFormat *ndkFormat = AMediaFormat_fromMsg(&newFormat);
566 if (muxer != NULL) {
567 trackIdx = AMediaMuxer_addTrack(muxer, ndkFormat);
568 if (gOutputFormat == FORMAT_MP4) {
569 AMediaFormat *metaFormat = AMediaFormat_new();
570 AMediaFormat_setString(metaFormat, AMEDIAFORMAT_KEY_MIME, kMimeTypeApplicationOctetstream);
571 metaTrackIdx = AMediaMuxer_addTrack(muxer, metaFormat);
572 AMediaFormat_delete(metaFormat);
573 }
574 ALOGV("Starting muxer");
575 err = AMediaMuxer_start(muxer);
576 if (err != NO_ERROR) {
577 fprintf(stderr, "Unable to start muxer (err=%d)\n", err);
578 return err;
579 }
580 }
581 }
582 break;
583 case android::INFO_OUTPUT_BUFFERS_CHANGED: // INFO_OUTPUT_BUFFERS_CHANGED
584 // Not expected for an encoder; handle it anyway.
585 ALOGV("Encoder buffers changed");
586 err = encoder->getOutputBuffers(&buffers);
587 if (err != NO_ERROR) {
588 fprintf(stderr,
589 "Unable to get new output buffers (err=%d)\n", err);
590 return err;
591 }
592 break;
593 case INVALID_OPERATION:
594 ALOGW("dequeueOutputBuffer returned INVALID_OPERATION");
595 return err;
596 default:
597 fprintf(stderr,
598 "Got weird result %d from dequeueOutputBuffer\n", err);
599 return err;
600 }
601 }
602
603 ALOGV("Encoder stopping (req=%d)", gStopRequested);
604 if (gVerbose) {
605 printf("Encoder stopping; recorded %u frames in %" PRId64 " seconds\n",
606 debugNumFrames, nanoseconds_to_seconds(
607 systemTime(CLOCK_MONOTONIC) - startWhenNsec));
608 fflush(stdout);
609 }
610 if (metaTrackIdx >= 0 && !timestamps.isEmpty()) {
611 err = writeWinscopeMetadata(timestamps, metaTrackIdx, muxer);
612 if (err != NO_ERROR) {
613 fprintf(stderr, "Failed writing metadata to muxer (err=%d)\n", err);
614 return err;
615 }
616 }
617 return NO_ERROR;
618 }
619
620 /*
621 * Raw H.264 byte stream output requested. Send the output to stdout
622 * if desired. If the output is a tty, reconfigure it to avoid the
623 * CRLF line termination that we see with "adb shell" commands.
624 */
prepareRawOutput(const char * fileName)625 static FILE* prepareRawOutput(const char* fileName) {
626 FILE* rawFp = NULL;
627
628 if (strcmp(fileName, "-") == 0) {
629 if (gVerbose) {
630 fprintf(stderr, "ERROR: verbose output and '-' not compatible");
631 return NULL;
632 }
633 rawFp = stdout;
634 } else {
635 rawFp = fopen(fileName, "w");
636 if (rawFp == NULL) {
637 fprintf(stderr, "fopen raw failed: %s\n", strerror(errno));
638 return NULL;
639 }
640 }
641
642 int fd = fileno(rawFp);
643 if (isatty(fd)) {
644 // best effort -- reconfigure tty for "raw"
645 ALOGD("raw video output to tty (fd=%d)", fd);
646 struct termios term;
647 if (tcgetattr(fd, &term) == 0) {
648 cfmakeraw(&term);
649 if (tcsetattr(fd, TCSANOW, &term) == 0) {
650 ALOGD("tty successfully configured for raw");
651 }
652 }
653 }
654
655 return rawFp;
656 }
657
floorToEven(uint32_t num)658 static inline uint32_t floorToEven(uint32_t num) {
659 return num & ~1;
660 }
661
662 /*
663 * Main "do work" start point.
664 *
665 * Configures codec, muxer, and virtual display, then starts moving bits
666 * around.
667 */
recordScreen(const char * fileName)668 static status_t recordScreen(const char* fileName) {
669 status_t err;
670
671 // Configure signal handler.
672 err = configureSignals();
673 if (err != NO_ERROR) return err;
674
675 // Start Binder thread pool. MediaCodec needs to be able to receive
676 // messages from mediaserver.
677 sp<ProcessState> self = ProcessState::self();
678 self->startThreadPool();
679
680 // Get main display parameters.
681 sp<IBinder> display = SurfaceComposerClient::getPhysicalDisplayToken(
682 gPhysicalDisplayId);
683 if (display == nullptr) {
684 fprintf(stderr, "ERROR: no display\n");
685 return NAME_NOT_FOUND;
686 }
687
688 ui::DisplayState displayState;
689 err = SurfaceComposerClient::getDisplayState(display, &displayState);
690 if (err != NO_ERROR) {
691 fprintf(stderr, "ERROR: unable to get display state\n");
692 return err;
693 }
694
695 DisplayConfig displayConfig;
696 err = SurfaceComposerClient::getActiveDisplayConfig(display, &displayConfig);
697 if (err != NO_ERROR) {
698 fprintf(stderr, "ERROR: unable to get display config\n");
699 return err;
700 }
701
702 const ui::Size& viewport = displayState.viewport;
703 if (gVerbose) {
704 printf("Display is %dx%d @%.2ffps (orientation=%s), layerStack=%u\n",
705 viewport.getWidth(), viewport.getHeight(), displayConfig.refreshRate,
706 toCString(displayState.orientation), displayState.layerStack);
707 fflush(stdout);
708 }
709
710 // Encoder can't take odd number as config
711 if (gVideoWidth == 0) {
712 gVideoWidth = floorToEven(viewport.getWidth());
713 }
714 if (gVideoHeight == 0) {
715 gVideoHeight = floorToEven(viewport.getHeight());
716 }
717
718 // Configure and start the encoder.
719 sp<MediaCodec> encoder;
720 sp<FrameOutput> frameOutput;
721 sp<IGraphicBufferProducer> encoderInputSurface;
722 if (gOutputFormat != FORMAT_FRAMES && gOutputFormat != FORMAT_RAW_FRAMES) {
723 err = prepareEncoder(displayConfig.refreshRate, &encoder, &encoderInputSurface);
724
725 if (err != NO_ERROR && !gSizeSpecified) {
726 // fallback is defined for landscape; swap if we're in portrait
727 bool needSwap = gVideoWidth < gVideoHeight;
728 uint32_t newWidth = needSwap ? kFallbackHeight : kFallbackWidth;
729 uint32_t newHeight = needSwap ? kFallbackWidth : kFallbackHeight;
730 if (gVideoWidth != newWidth && gVideoHeight != newHeight) {
731 ALOGV("Retrying with 720p");
732 fprintf(stderr, "WARNING: failed at %dx%d, retrying at %dx%d\n",
733 gVideoWidth, gVideoHeight, newWidth, newHeight);
734 gVideoWidth = newWidth;
735 gVideoHeight = newHeight;
736 err = prepareEncoder(displayConfig.refreshRate, &encoder, &encoderInputSurface);
737 }
738 }
739 if (err != NO_ERROR) return err;
740
741 // From here on, we must explicitly release() the encoder before it goes
742 // out of scope, or we will get an assertion failure from stagefright
743 // later on in a different thread.
744 } else {
745 // We're not using an encoder at all. The "encoder input surface" we hand to
746 // SurfaceFlinger will just feed directly to us.
747 frameOutput = new FrameOutput();
748 err = frameOutput->createInputSurface(gVideoWidth, gVideoHeight, &encoderInputSurface);
749 if (err != NO_ERROR) {
750 return err;
751 }
752 }
753
754 // Draw the "info" page by rendering a frame with GLES and sending
755 // it directly to the encoder.
756 // TODO: consider displaying this as a regular layer to avoid b/11697754
757 if (gWantInfoScreen) {
758 Overlay::drawInfoPage(encoderInputSurface);
759 }
760
761 // Configure optional overlay.
762 sp<IGraphicBufferProducer> bufferProducer;
763 sp<Overlay> overlay;
764 if (gWantFrameTime) {
765 // Send virtual display frames to an external texture.
766 overlay = new Overlay(gMonotonicTime);
767 err = overlay->start(encoderInputSurface, &bufferProducer);
768 if (err != NO_ERROR) {
769 if (encoder != NULL) encoder->release();
770 return err;
771 }
772 if (gVerbose) {
773 printf("Bugreport overlay created\n");
774 fflush(stdout);
775 }
776 } else {
777 // Use the encoder's input surface as the virtual display surface.
778 bufferProducer = encoderInputSurface;
779 }
780
781 // Configure virtual display.
782 sp<IBinder> dpy;
783 err = prepareVirtualDisplay(displayState, bufferProducer, &dpy);
784 if (err != NO_ERROR) {
785 if (encoder != NULL) encoder->release();
786 return err;
787 }
788
789 AMediaMuxer *muxer = nullptr;
790 FILE* rawFp = NULL;
791 switch (gOutputFormat) {
792 case FORMAT_MP4:
793 case FORMAT_WEBM:
794 case FORMAT_3GPP: {
795 // Configure muxer. We have to wait for the CSD blob from the encoder
796 // before we can start it.
797 err = unlink(fileName);
798 if (err != 0 && errno != ENOENT) {
799 fprintf(stderr, "ERROR: couldn't remove existing file\n");
800 abort();
801 }
802 int fd = open(fileName, O_CREAT | O_LARGEFILE | O_TRUNC | O_RDWR, S_IRUSR | S_IWUSR);
803 if (fd < 0) {
804 fprintf(stderr, "ERROR: couldn't open file\n");
805 abort();
806 }
807 if (gOutputFormat == FORMAT_MP4) {
808 muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_MPEG_4);
809 } else if (gOutputFormat == FORMAT_WEBM) {
810 muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_WEBM);
811 } else {
812 muxer = AMediaMuxer_new(fd, AMEDIAMUXER_OUTPUT_FORMAT_THREE_GPP);
813 }
814 close(fd);
815 if (gRotate) {
816 AMediaMuxer_setOrientationHint(muxer, 90); // TODO: does this do anything?
817 }
818 break;
819 }
820 case FORMAT_H264:
821 case FORMAT_FRAMES:
822 case FORMAT_RAW_FRAMES: {
823 rawFp = prepareRawOutput(fileName);
824 if (rawFp == NULL) {
825 if (encoder != NULL) encoder->release();
826 return -1;
827 }
828 break;
829 }
830 default:
831 fprintf(stderr, "ERROR: unknown format %d\n", gOutputFormat);
832 abort();
833 }
834
835 if (gOutputFormat == FORMAT_FRAMES || gOutputFormat == FORMAT_RAW_FRAMES) {
836 // TODO: if we want to make this a proper feature, we should output
837 // an outer header with version info. Right now we never change
838 // the frame size or format, so we could conceivably just send
839 // the current frame header once and then follow it with an
840 // unbroken stream of data.
841
842 // Make the EGL context current again. This gets unhooked if we're
843 // using "--bugreport" mode.
844 // TODO: figure out if we can eliminate this
845 frameOutput->prepareToCopy();
846
847 while (!gStopRequested) {
848 // Poll for frames, the same way we do for MediaCodec. We do
849 // all of the work on the main thread.
850 //
851 // Ideally we'd sleep indefinitely and wake when the
852 // stop was requested, but this will do for now. (It almost
853 // works because wait() wakes when a signal hits, but we
854 // need to handle the edge cases.)
855 bool rawFrames = gOutputFormat == FORMAT_RAW_FRAMES;
856 err = frameOutput->copyFrame(rawFp, 250000, rawFrames);
857 if (err == ETIMEDOUT) {
858 err = NO_ERROR;
859 } else if (err != NO_ERROR) {
860 ALOGE("Got error %d from copyFrame()", err);
861 break;
862 }
863 }
864 } else {
865 // Main encoder loop.
866 err = runEncoder(encoder, muxer, rawFp, display, dpy, displayState.orientation);
867 if (err != NO_ERROR) {
868 fprintf(stderr, "Encoder failed (err=%d)\n", err);
869 // fall through to cleanup
870 }
871
872 if (gVerbose) {
873 printf("Stopping encoder and muxer\n");
874 fflush(stdout);
875 }
876 }
877
878 // Shut everything down, starting with the producer side.
879 encoderInputSurface = NULL;
880 SurfaceComposerClient::destroyDisplay(dpy);
881 if (overlay != NULL) overlay->stop();
882 if (encoder != NULL) encoder->stop();
883 if (muxer != NULL) {
884 // If we don't stop muxer explicitly, i.e. let the destructor run,
885 // it may hang (b/11050628).
886 err = AMediaMuxer_stop(muxer);
887 } else if (rawFp != stdout) {
888 fclose(rawFp);
889 }
890 if (encoder != NULL) encoder->release();
891
892 return err;
893 }
894
895 /*
896 * Sends a broadcast to the media scanner to tell it about the new video.
897 *
898 * This is optional, but nice to have.
899 */
notifyMediaScanner(const char * fileName)900 static status_t notifyMediaScanner(const char* fileName) {
901 // need to do allocations before the fork()
902 String8 fileUrl("file://");
903 fileUrl.append(fileName);
904
905 const char* kCommand = "/system/bin/am";
906 const char* const argv[] = {
907 kCommand,
908 "broadcast",
909 "-a",
910 "android.intent.action.MEDIA_SCANNER_SCAN_FILE",
911 "-d",
912 fileUrl.string(),
913 NULL
914 };
915 if (gVerbose) {
916 printf("Executing:");
917 for (int i = 0; argv[i] != NULL; i++) {
918 printf(" %s", argv[i]);
919 }
920 putchar('\n');
921 fflush(stdout);
922 }
923
924 pid_t pid = fork();
925 if (pid < 0) {
926 int err = errno;
927 ALOGW("fork() failed: %s", strerror(err));
928 return -err;
929 } else if (pid > 0) {
930 // parent; wait for the child, mostly to make the verbose-mode output
931 // look right, but also to check for and log failures
932 int status;
933 pid_t actualPid = TEMP_FAILURE_RETRY(waitpid(pid, &status, 0));
934 if (actualPid != pid) {
935 ALOGW("waitpid(%d) returned %d (errno=%d)", pid, actualPid, errno);
936 } else if (status != 0) {
937 ALOGW("'am broadcast' exited with status=%d", status);
938 } else {
939 ALOGV("'am broadcast' exited successfully");
940 }
941 } else {
942 if (!gVerbose) {
943 // non-verbose, suppress 'am' output
944 ALOGV("closing stdout/stderr in child");
945 int fd = open("/dev/null", O_WRONLY);
946 if (fd >= 0) {
947 dup2(fd, STDOUT_FILENO);
948 dup2(fd, STDERR_FILENO);
949 close(fd);
950 }
951 }
952 execv(kCommand, const_cast<char* const*>(argv));
953 ALOGE("execv(%s) failed: %s\n", kCommand, strerror(errno));
954 exit(1);
955 }
956 return NO_ERROR;
957 }
958
959 /*
960 * Parses a string of the form "1280x720".
961 *
962 * Returns true on success.
963 */
parseWidthHeight(const char * widthHeight,uint32_t * pWidth,uint32_t * pHeight)964 static bool parseWidthHeight(const char* widthHeight, uint32_t* pWidth,
965 uint32_t* pHeight) {
966 long width, height;
967 char* end;
968
969 // Must specify base 10, or "0x0" gets parsed differently.
970 width = strtol(widthHeight, &end, 10);
971 if (end == widthHeight || *end != 'x' || *(end+1) == '\0') {
972 // invalid chars in width, or missing 'x', or missing height
973 return false;
974 }
975 height = strtol(end + 1, &end, 10);
976 if (*end != '\0') {
977 // invalid chars in height
978 return false;
979 }
980
981 *pWidth = width;
982 *pHeight = height;
983 return true;
984 }
985
986 /*
987 * Accepts a string with a bare number ("4000000") or with a single-character
988 * unit ("4m").
989 *
990 * Returns an error if parsing fails.
991 */
parseValueWithUnit(const char * str,uint32_t * pValue)992 static status_t parseValueWithUnit(const char* str, uint32_t* pValue) {
993 long value;
994 char* endptr;
995
996 value = strtol(str, &endptr, 10);
997 if (*endptr == '\0') {
998 // bare number
999 *pValue = value;
1000 return NO_ERROR;
1001 } else if (toupper(*endptr) == 'M' && *(endptr+1) == '\0') {
1002 *pValue = value * 1000000; // check for overflow?
1003 return NO_ERROR;
1004 } else {
1005 fprintf(stderr, "Unrecognized value: %s\n", str);
1006 return UNKNOWN_ERROR;
1007 }
1008 }
1009
1010 /*
1011 * Dumps usage on stderr.
1012 */
usage()1013 static void usage() {
1014 fprintf(stderr,
1015 "Usage: screenrecord [options] <filename>\n"
1016 "\n"
1017 "Android screenrecord v%d.%d. Records the device's display to a .mp4 file.\n"
1018 "\n"
1019 "Options:\n"
1020 "--size WIDTHxHEIGHT\n"
1021 " Set the video size, e.g. \"1280x720\". Default is the device's main\n"
1022 " display resolution (if supported), 1280x720 if not. For best results,\n"
1023 " use a size supported by the AVC encoder.\n"
1024 "--bit-rate RATE\n"
1025 " Set the video bit rate, in bits per second. Value may be specified as\n"
1026 " bits or megabits, e.g. '4000000' is equivalent to '4M'. Default %dMbps.\n"
1027 "--bugreport\n"
1028 " Add additional information, such as a timestamp overlay, that is helpful\n"
1029 " in videos captured to illustrate bugs.\n"
1030 "--time-limit TIME\n"
1031 " Set the maximum recording time, in seconds. Default / maximum is %d.\n"
1032 "--display-id ID\n"
1033 " specify the physical display ID to record. Default is the primary display.\n"
1034 " see \"dumpsys SurfaceFlinger --display-id\" for valid display IDs.\n"
1035 "--verbose\n"
1036 " Display interesting information on stdout.\n"
1037 "--help\n"
1038 " Show this message.\n"
1039 "\n"
1040 "Recording continues until Ctrl-C is hit or the time limit is reached.\n"
1041 "\n",
1042 kVersionMajor, kVersionMinor, gBitRate / 1000000, gTimeLimitSec
1043 );
1044 }
1045
1046 /*
1047 * Parses args and kicks things off.
1048 */
main(int argc,char * const argv[])1049 int main(int argc, char* const argv[]) {
1050 static const struct option longOptions[] = {
1051 { "help", no_argument, NULL, 'h' },
1052 { "verbose", no_argument, NULL, 'v' },
1053 { "size", required_argument, NULL, 's' },
1054 { "bit-rate", required_argument, NULL, 'b' },
1055 { "time-limit", required_argument, NULL, 't' },
1056 { "bugreport", no_argument, NULL, 'u' },
1057 // "unofficial" options
1058 { "show-device-info", no_argument, NULL, 'i' },
1059 { "show-frame-time", no_argument, NULL, 'f' },
1060 { "rotate", no_argument, NULL, 'r' },
1061 { "output-format", required_argument, NULL, 'o' },
1062 { "codec-name", required_argument, NULL, 'N' },
1063 { "monotonic-time", no_argument, NULL, 'm' },
1064 { "persistent-surface", no_argument, NULL, 'p' },
1065 { "bframes", required_argument, NULL, 'B' },
1066 { "display-id", required_argument, NULL, 'd' },
1067 { NULL, 0, NULL, 0 }
1068 };
1069
1070 std::optional<PhysicalDisplayId> displayId = SurfaceComposerClient::getInternalDisplayId();
1071 if (!displayId) {
1072 fprintf(stderr, "Failed to get token for internal display\n");
1073 return 1;
1074 }
1075
1076 gPhysicalDisplayId = *displayId;
1077
1078 while (true) {
1079 int optionIndex = 0;
1080 int ic = getopt_long(argc, argv, "", longOptions, &optionIndex);
1081 if (ic == -1) {
1082 break;
1083 }
1084
1085 switch (ic) {
1086 case 'h':
1087 usage();
1088 return 0;
1089 case 'v':
1090 gVerbose = true;
1091 break;
1092 case 's':
1093 if (!parseWidthHeight(optarg, &gVideoWidth, &gVideoHeight)) {
1094 fprintf(stderr, "Invalid size '%s', must be width x height\n",
1095 optarg);
1096 return 2;
1097 }
1098 if (gVideoWidth == 0 || gVideoHeight == 0) {
1099 fprintf(stderr,
1100 "Invalid size %ux%u, width and height may not be zero\n",
1101 gVideoWidth, gVideoHeight);
1102 return 2;
1103 }
1104 gSizeSpecified = true;
1105 break;
1106 case 'b':
1107 if (parseValueWithUnit(optarg, &gBitRate) != NO_ERROR) {
1108 return 2;
1109 }
1110 if (gBitRate < kMinBitRate || gBitRate > kMaxBitRate) {
1111 fprintf(stderr,
1112 "Bit rate %dbps outside acceptable range [%d,%d]\n",
1113 gBitRate, kMinBitRate, kMaxBitRate);
1114 return 2;
1115 }
1116 break;
1117 case 't':
1118 gTimeLimitSec = atoi(optarg);
1119 if (gTimeLimitSec == 0 || gTimeLimitSec > kMaxTimeLimitSec) {
1120 fprintf(stderr,
1121 "Time limit %ds outside acceptable range [1,%d]\n",
1122 gTimeLimitSec, kMaxTimeLimitSec);
1123 return 2;
1124 }
1125 break;
1126 case 'u':
1127 gWantInfoScreen = true;
1128 gWantFrameTime = true;
1129 break;
1130 case 'i':
1131 gWantInfoScreen = true;
1132 break;
1133 case 'f':
1134 gWantFrameTime = true;
1135 break;
1136 case 'r':
1137 // experimental feature
1138 gRotate = true;
1139 break;
1140 case 'o':
1141 if (strcmp(optarg, "mp4") == 0) {
1142 gOutputFormat = FORMAT_MP4;
1143 } else if (strcmp(optarg, "h264") == 0) {
1144 gOutputFormat = FORMAT_H264;
1145 } else if (strcmp(optarg, "webm") == 0) {
1146 gOutputFormat = FORMAT_WEBM;
1147 } else if (strcmp(optarg, "3gpp") == 0) {
1148 gOutputFormat = FORMAT_3GPP;
1149 } else if (strcmp(optarg, "frames") == 0) {
1150 gOutputFormat = FORMAT_FRAMES;
1151 } else if (strcmp(optarg, "raw-frames") == 0) {
1152 gOutputFormat = FORMAT_RAW_FRAMES;
1153 } else {
1154 fprintf(stderr, "Unknown format '%s'\n", optarg);
1155 return 2;
1156 }
1157 break;
1158 case 'N':
1159 gCodecName = optarg;
1160 break;
1161 case 'm':
1162 gMonotonicTime = true;
1163 break;
1164 case 'p':
1165 gPersistentSurface = true;
1166 break;
1167 case 'B':
1168 if (parseValueWithUnit(optarg, &gBframes) != NO_ERROR) {
1169 return 2;
1170 }
1171 break;
1172 case 'd':
1173 gPhysicalDisplayId = atoll(optarg);
1174 if (gPhysicalDisplayId == 0) {
1175 fprintf(stderr, "Please specify a valid physical display id\n");
1176 return 2;
1177 } else if (SurfaceComposerClient::
1178 getPhysicalDisplayToken(gPhysicalDisplayId) == nullptr) {
1179 fprintf(stderr, "Invalid physical display id: %"
1180 ANDROID_PHYSICAL_DISPLAY_ID_FORMAT "\n", gPhysicalDisplayId);
1181 return 2;
1182 }
1183 break;
1184 default:
1185 if (ic != '?') {
1186 fprintf(stderr, "getopt_long returned unexpected value 0x%x\n", ic);
1187 }
1188 return 2;
1189 }
1190 }
1191
1192 if (optind != argc - 1) {
1193 fprintf(stderr, "Must specify output file (see --help).\n");
1194 return 2;
1195 }
1196
1197 const char* fileName = argv[optind];
1198 if (gOutputFormat == FORMAT_MP4) {
1199 // MediaMuxer tries to create the file in the constructor, but we don't
1200 // learn about the failure until muxer.start(), which returns a generic
1201 // error code without logging anything. We attempt to create the file
1202 // now for better diagnostics.
1203 int fd = open(fileName, O_CREAT | O_RDWR, 0644);
1204 if (fd < 0) {
1205 fprintf(stderr, "Unable to open '%s': %s\n", fileName, strerror(errno));
1206 return 1;
1207 }
1208 close(fd);
1209 }
1210
1211 status_t err = recordScreen(fileName);
1212 if (err == NO_ERROR) {
1213 // Try to notify the media scanner. Not fatal if this fails.
1214 notifyMediaScanner(fileName);
1215 }
1216 ALOGD(err == NO_ERROR ? "success" : "failed");
1217 return (int) err;
1218 }
1219