1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.media.cts; 18 19 import android.app.Presentation; 20 import android.content.ComponentName; 21 import android.content.Context; 22 import android.content.Intent; 23 import android.content.ServiceConnection; 24 import android.graphics.SurfaceTexture; 25 import android.graphics.drawable.ColorDrawable; 26 import android.hardware.display.DisplayManager; 27 import android.hardware.display.VirtualDisplay; 28 import android.media.MediaCodec; 29 import android.media.MediaCodec.BufferInfo; 30 import android.media.MediaCodecInfo; 31 import android.media.MediaCodecList; 32 import android.media.MediaFormat; 33 import android.media.cts.R; 34 import android.opengl.GLES11Ext; 35 import android.opengl.GLES20; 36 import android.opengl.Matrix; 37 import android.os.Bundle; 38 import android.os.Handler; 39 import android.os.IBinder; 40 import android.os.Looper; 41 import android.os.Message; 42 import android.os.Parcel; 43 import android.platform.test.annotations.RequiresDevice; 44 import android.util.Log; 45 import android.util.Size; 46 import android.view.Display; 47 import android.view.Surface; 48 import android.view.View; 49 import android.view.ViewGroup; 50 import android.view.ViewGroup.LayoutParams; 51 import android.view.WindowManager; 52 import android.widget.FrameLayout; 53 import android.widget.ImageView; 54 import 
android.widget.TableLayout;
import android.widget.TableRow;

import androidx.test.filters.SmallTest;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Impl class for tests using MediaCodec encoding with composition of multiple virtual displays.
 */
public class EncodeVirtualDisplayWithCompositionTestImpl {
    private static final String TAG = "EncodeVirtualDisplayWithCompositionTestImpl";
    private static final boolean DBG = false;
    private static final String MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;

    // Generic timeout for asynchronous steps (composition start, window recreation, surface ready).
    private static final long DEFAULT_WAIT_TIMEOUT_MS = 10000; // 10 seconds
    // Timeout passed to MediaCodec.dequeueInputBuffer / dequeueOutputBuffer.
    private static final long DEQUEUE_TIMEOUT_US = 3000000; // 3 seconds

    // Solid colors rendered into the virtual display and verified at the decoder output.
    // Matching is fuzzy (see approxEquals) to tolerate encode/decode color conversion error.
    private static final int COLOR_RED = makeColor(100, 0, 0);
    private static final int COLOR_GREEN = makeColor(0, 100, 0);
    private static final int COLOR_BLUE = makeColor(0, 0, 100);
    private static final int COLOR_GREY = makeColor(100, 100, 100);

    // Encoder bitrates chosen per resolution (see EncodingHelper.doEncoding).
    static final int BITRATE_1080p = 20000000;
    static final int BITRATE_720p = 14000000;
    static final int BITRATE_800x480 = 14000000;
    static final int BITRATE_DEFAULT = 10000000;

    private static final int IFRAME_INTERVAL = 10;

    private static final int MAX_NUM_WINDOWS = 3;

    // Handler bound to the main looper so presentation/display work can be posted to the UI thread.
    private static Handler sHandlerForRunOnMain = new Handler(Looper.getMainLooper());

    private Surface mEncodingSurface;
    private OutputSurface mDecodingSurface;
    // Set by the no-verification listener below; checked in doTestVirtualDisplayRecycles.
    private volatile boolean mCodecConfigReceived = false;
    private volatile boolean mCodecBufferReceived = false;
    private EncodingHelper mEncodingHelper;
    private MediaCodec mDecoder;
    // Scratch buffer for a single RGBA pixel read back via glReadPixels (4 bytes).
    private final ByteBuffer mPixelBuf = ByteBuffer.allocateDirect(4);
    // Signals encoder callbacks to stop feeding the decoder during teardown.
    private volatile boolean mIsQuitting = false;
    // Failure captured on the rendering thread, rethrown on the test thread.
    private Throwable mTestException;
    private VirtualDisplayPresentation mLocalPresentation;
    private RemoteVirtualDisplayPresentation mRemotePresentation;
    private ByteBuffer[] mDecoderInputBuffers;

    /** event listener for test without verifying output */
    private EncoderEventListener mEncoderEventListener = new EncoderEventListener() {
        @Override
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecConfigReceived = true;
        }
        @Override
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecBufferReceived = true;
        }
        @Override
        public void onError(String errorMessage) {
            // NOTE(review): fail() runs on the encoder thread here, so the AssertionError is
            // raised off the instrumentation thread — confirm the runner surfaces it.
            fail(errorMessage);
        }
    };

    /* TEST_COLORS static initialization; need ARGB for ColorDrawable */
    // Packs 8-bit RGB components into an opaque (alpha = 0xff) ARGB int.
    private static int makeColor(int red, int green, int blue) {
        return 0xff << 24 | (red & 0xff) << 16 | (green & 0xff) << 8 | (blue & 0xff);
    }

    /**
     * Run rendering test in a separate thread. This is necessary as {@link OutputSurface} requires
     * constructing it in a non-test thread.
     * @param w
     * @param h
     * @throws Exception
     */
    void runTestRenderingInSeparateThread(final Context context, final String mimeType,
            final int w, final int h, final boolean runRemotely, final boolean multipleWindows)
            throws Throwable {
        // Convenience overload: no rotation, default decoder.
        runTestRenderingInSeparateThread(
                context, mimeType, w, h, runRemotely, multipleWindows, /* degrees */ 0, null);
    }

    /**
     * Runs {@link #doTestRenderingOutput} on a dedicated thread, waits up to 60 s for it to
     * finish, and rethrows any failure it captured on the calling (test) thread.
     */
    void runTestRenderingInSeparateThread(final Context context, final String mimeType,
            final int w, final int h, final boolean runRemotely, final boolean multipleWindows,
            final int degrees, final String decoderName) throws Throwable {
        mTestException = null;
        Thread renderingThread = new Thread(new Runnable() {
            public void run() {
                try {
                    doTestRenderingOutput(
                            context, mimeType, w, h, runRemotely, multipleWindows,
                            degrees, decoderName);
                } catch (Throwable t) {
                    // Remember the failure; it is rethrown on the test thread below.
                    t.printStackTrace();
                    mTestException = t;
                }
            }
        });
        renderingThread.start();
        renderingThread.join(60000);
        // If the thread is still alive the 60 s budget was exceeded — treat as failure.
        assertTrue(!renderingThread.isAlive());
        if (mTestException != null) {
            throw mTestException;
        }
    }

    /**
     * Full pipeline test: virtual display -> GL composition -> encoder -> decoder -> pixel check.
     * The encoder's output callbacks feed the decoder directly; decoded frames are rendered to
     * {@link OutputSurface} and verified by color (degrees == 0) or by quadrant rotation check.
     */
    private void doTestRenderingOutput(final Context context, String mimeType, int w, int h,
            boolean runRemotely, boolean multipleWindows, int degrees,
            String decoderName) throws Throwable {
        if (DBG) {
            Log.i(TAG, "doTestRenderingOutput for type:" + mimeType + " w:" + w + " h:" + h);
        }
        try {
            mIsQuitting = false;
            if (decoderName == null) {
                mDecoder = MediaCodec.createDecoderByType(mimeType);
            } else {
                mDecoder = MediaCodec.createByCodecName(decoderName);
            }
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(mimeType, w, h);
            // Color configuration mirrors the encoder side (see EncodingHelper.doEncoding)
            // so the round-tripped pixels compare against the rendered colors.
            decoderFormat.setInteger(
                    MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
            decoderFormat.setInteger(
                    MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_PAL);
            decoderFormat.setInteger(
                    MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
            if (degrees != 0) {
                decoderFormat.setInteger(MediaFormat.KEY_ROTATION, degrees);
            }
            mDecodingSurface = new OutputSurface(w, h);
            mDecoder.configure(decoderFormat, mDecodingSurface.getSurface(), null, 0);
            // only scale to fit scaling mode is supported
            mDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
            mDecoder.start();
            mDecoderInputBuffers = mDecoder.getInputBuffers();

            mEncodingHelper = new EncodingHelper();
            // The listener runs on the encoder thread and pipes encoded buffers into the decoder.
            mEncodingSurface = mEncodingHelper.startEncoding(mimeType, w, h,
                    new EncoderEventListener() {
                        @Override
                        public void onCodecConfig(ByteBuffer data, BufferInfo info) {
                            if (DBG) {
                                Log.i(TAG, "onCodecConfig l:" + info.size);
                            }
                            handleEncodedData(data, info);
                        }

                        @Override
                        public void onBufferReady(ByteBuffer data, BufferInfo info) {
                            if (DBG) {
                                Log.i(TAG, "onBufferReady l:" + info.size);
                            }
                            handleEncodedData(data, info);
                        }

                        @Override
                        public void onError(String errorMessage) {
                            fail(errorMessage);
                        }

                        // Queues one encoded buffer into the decoder; silently drops data while
                        // quitting or when no decoder input buffer becomes available in time.
                        private void handleEncodedData(ByteBuffer data, BufferInfo info) {
                            if (mIsQuitting) {
                                if (DBG) {
                                    Log.i(TAG, "ignore data as test is quitting");
                                }
                                return;
                            }
                            int inputBufferIndex = mDecoder.dequeueInputBuffer(DEQUEUE_TIMEOUT_US);
                            if (inputBufferIndex < 0) {
                                if (DBG) {
                                    Log.i(TAG, "dequeueInputBuffer returned:" + inputBufferIndex);
                                }
                                return;
                            }
                            assertTrue(inputBufferIndex >= 0);
                            ByteBuffer inputBuffer = mDecoderInputBuffers[inputBufferIndex];
                            inputBuffer.clear();
                            inputBuffer.put(data);
                            mDecoder.queueInputBuffer(inputBufferIndex, 0, info.size,
                                    info.presentationTimeUs, info.flags);
                        }
                    });
            GlCompositor compositor = new GlCompositor(context);
            if (DBG) {
                Log.i(TAG, "start composition");
            }
            compositor.startComposition(mEncodingSurface, w, h, multipleWindows ? 3 : 1);

            if (DBG) {
                Log.i(TAG, "create display");
            }

            Renderer renderer = null;
            // With multiple windows, render into window 1 (a sub-window), otherwise window 0.
            Surface windowSurface = compositor.getWindowSurface(multipleWindows? 1 : 0);
            if (runRemotely) {
                mRemotePresentation =
                        new RemoteVirtualDisplayPresentation(context, windowSurface, w, h);
                mRemotePresentation.connect();
                mRemotePresentation.start();
                renderer = mRemotePresentation;
            } else {
                mLocalPresentation = (degrees == 0)
                        ? new VirtualDisplayPresentation(context, windowSurface, w, h)
                        : new RotateVirtualDisplayPresentation(context, windowSurface, w, h);
                mLocalPresentation.createVirtualDisplay();
                mLocalPresentation.createPresentation();
                renderer = mLocalPresentation;
            }

            if (DBG) {
                Log.i(TAG, "start rendering and check");
            }
            if (degrees == 0) {
                renderColorAndCheckResult(renderer, w, h, COLOR_RED);
                renderColorAndCheckResult(renderer, w, h, COLOR_BLUE);
                renderColorAndCheckResult(renderer, w, h, COLOR_GREEN);
                renderColorAndCheckResult(renderer, w, h, COLOR_GREY);
            } else {
                renderRotationAndCheckResult(renderer, w, h, degrees);
            }

            // Stop feeding the decoder before tearing the pipeline down.
            mIsQuitting = true;
            if (runRemotely) {
                mRemotePresentation.disconnect();
            } else {
                mLocalPresentation.dismissPresentation();
                mLocalPresentation.destroyVirtualDisplay();
            }

            compositor.stopComposition();
        } finally {
            // Teardown order: encoder first (stops producing), then decoder, then its surface.
            if (mEncodingHelper != null) {
                mEncodingHelper.stopEncoding();
                mEncodingHelper = null;
            }
            if (mDecoder != null) {
                mDecoder.stop();
                mDecoder.release();
                mDecoder = null;
            }
            if (mDecodingSurface != null) {
                mDecodingSurface.release();
                mDecodingSurface = null;
            }
        }
    }

    private static final int NUM_MAX_RETRY = 120;
    private static final int IMAGE_WAIT_TIMEOUT_MS = 1000;

    /**
     * Repeatedly renders {@code color} and polls decoded frames until the center pixel matches
     * (within tolerance), failing after NUM_MAX_RETRY attempts. Early frames may still show the
     * previous color, hence the retry loop.
     */
    private void renderColorAndCheckResult(Renderer renderer, int w, int h,
            int color) throws Exception {
        BufferInfo info = new BufferInfo();
        for (int i = 0; i < NUM_MAX_RETRY; i++) {
            renderer.doRendering(color);
            int bufferIndex = mDecoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
            if (DBG) {
                Log.i(TAG, "decoder dequeueOutputBuffer returned " + bufferIndex);
            }
            if (bufferIndex < 0) {
                // No decoded frame yet; render again and retry.
                continue;
            }
            // Render the decoded frame to the output surface so it can be read back.
            mDecoder.releaseOutputBuffer(bufferIndex, true);
            if (mDecodingSurface.checkForNewImage(IMAGE_WAIT_TIMEOUT_MS)) {
                mDecodingSurface.drawImage();
                if (checkSurfaceFrameColor(w, h, color)) {
                    Log.i(TAG, "color " + Integer.toHexString(color) + " matched");
                    return;
                }
            } else if(DBG) {
                Log.i(TAG, "no rendering yet");
            }
        }
        fail("Color did not match");
    }

    /**
     * Renders the four-quadrant rotation pattern (doRendering(-1)) and polls decoded frames
     * until the quadrants show the colors expected for {@code degrees} of rotation.
     */
    private void renderRotationAndCheckResult(Renderer renderer, int w, int h,
            int degrees) throws Exception {
        BufferInfo info = new BufferInfo();
        for (int i = 0; i < NUM_MAX_RETRY; i++) {
            // -1 requests the rotation test pattern rather than a solid color.
            renderer.doRendering(-1);
            int bufferIndex = mDecoder.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT_US);
            if (DBG) {
                Log.i(TAG, "decoder dequeueOutputBuffer returned " + bufferIndex);
            }
            if (bufferIndex < 0) {
                continue;
            }
            mDecoder.releaseOutputBuffer(bufferIndex, true);
            if (mDecodingSurface.checkForNewImage(IMAGE_WAIT_TIMEOUT_MS)) {
                mDecodingSurface.drawImage();
                if (checkRotatedFrameQuadrants(w, h, degrees)) {
                    Log.i(TAG, "output rotated " + degrees + " degrees");
                    return;
                }
            } else if(DBG) {
                Log.i(TAG, "no rendering yet");
            }
        }
        fail("Frame not properly rotated");
    }

    /**
     * Samples one pixel per quadrant of the current GL surface and checks that the quadrant
     * colors match the expected pattern rotated clockwise by {@code degrees} (multiple of 90).
     * Returns false on the first mismatching quadrant.
     */
    private boolean checkRotatedFrameQuadrants(int w, int h, int degrees) {
        // Read a pixel from each quadrant of the surface.
        int ww = w / 4;
        int hh = h / 4;
        // coords is ordered counter clockwise (note, gl 0,0 is bottom left)
        int[][] coords = new int[][] {{ww, hh}, {ww * 3, hh}, {ww * 3, hh * 3}, {ww, hh * 3}};
        List<Integer> expected = new ArrayList<>();
        List<Integer> colors = Arrays.asList(
                new Integer[] {COLOR_GREEN, COLOR_BLUE, COLOR_RED, COLOR_GREY});
        // Duplicate the list so (offset + i) never indexes out of range.
        expected.addAll(colors);
        expected.addAll(colors);
        int offset = (degrees / 90) % 4;
        for (int i = 0; i < coords.length; i++) {
            int[] c = coords[i];
            int x = c[0];
            int y = c[1];
            GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
            int r = mPixelBuf.get(0) & 0xff;
            int g = mPixelBuf.get(1) & 0xff;
            int b = mPixelBuf.get(2) & 0xff;
            // adding the offset to rotate expected colors clockwise
            int color = expected.get(offset + i);
            int redExpected = (color >> 16) & 0xff;
            int greenExpected = (color >> 8) & 0xff;
            int blueExpected = color & 0xff;
            Log.i(TAG, String.format("(%d,%d) expecting %d,%d,%d saw %d,%d,%d",
                    x, y, redExpected, greenExpected, blueExpected, r, g, b));
            if (!approxEquals(redExpected, r) || !approxEquals(greenExpected, g)
                    || !approxEquals(blueExpected, b)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Reads the center pixel of the current GL surface and compares it against {@code color}
     * using the approxEquals tolerance. Returns true on a match.
     */
    private boolean checkSurfaceFrameColor(int w, int h, int color) {
        // Read a pixel from the center of the surface. Might want to read from multiple points
        // and average them together.
        int x = w / 2;
        int y = h / 2;
        GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
        int r = mPixelBuf.get(0) & 0xff;
        int g = mPixelBuf.get(1) & 0xff;
        int b = mPixelBuf.get(2) & 0xff;

        int redExpected = (color >> 16) & 0xff;
        int greenExpected = (color >> 8) & 0xff;
        int blueExpected = color & 0xff;
        if (approxEquals(redExpected, r) && approxEquals(greenExpected, g)
                && approxEquals(blueExpected, b)) {
            return true;
        }
        // Log the mismatch to help diagnose color-conversion drift.
        Log.i(TAG, "expected 0x" + Integer.toHexString(color) + " got 0x"
                + Integer.toHexString(makeColor(r, g, b)));
        return false;
    }

    /**
     * Determines if two color values are approximately equal.
     */
    private static boolean approxEquals(int expected, int actual) {
        // allow differences between BT.601 and BT.709 conversions during encoding/decoding for now
        final int MAX_DELTA = 17;
        return Math.abs(expected - actual) <= MAX_DELTA;
    }

    private static final int NUM_CODEC_CREATION = 5;
    private static final int NUM_DISPLAY_CREATION = 10;
    private static final int NUM_RENDERING = 10;

    /**
     * Stress test: repeatedly creates and destroys encoder + compositor + virtual displays to
     * verify they can be recycled. Output pixels are not verified; the test only asserts that
     * codec-config and encoded-frame callbacks arrived for each encoder instance.
     */
    void doTestVirtualDisplayRecycles(final Context context, int numDisplays) throws Exception {
        Size maxSize = getMaxSupportedEncoderSize();
        if (maxSize == null) {
            Log.i(TAG, "no codec found, skipping");
            return;
        }
        VirtualDisplayPresentation[] virtualDisplays = new VirtualDisplayPresentation[numDisplays];
        for (int i = 0; i < NUM_CODEC_CREATION; i++) {
            mCodecConfigReceived = false;
            mCodecBufferReceived = false;
            if (DBG) {
                Log.i(TAG, "start encoding");
            }
            EncodingHelper encodingHelper = new EncodingHelper();
            try {
                mEncodingSurface = encodingHelper.startEncoding(
                        MIME_TYPE, maxSize.getWidth(), maxSize.getHeight(), mEncoderEventListener);
                GlCompositor compositor = new GlCompositor(context);
                if (DBG) {
                    Log.i(TAG, "start composition");
                }
                compositor.startComposition(mEncodingSurface,
                        maxSize.getWidth(), maxSize.getHeight(), numDisplays);
                for (int j = 0; j < NUM_DISPLAY_CREATION; j++) {
                    if (DBG) {
                        Log.i(TAG, "create display");
                    }
                    for (int k = 0; k < numDisplays; k++) {
                        virtualDisplays[k] =
                            new VirtualDisplayPresentation(context,
                                    compositor.getWindowSurface(k),
                                    maxSize.getWidth()/numDisplays, maxSize.getHeight());
                        virtualDisplays[k].createVirtualDisplay();
                        virtualDisplays[k].createPresentation();
                    }
                    if (DBG) {
                        Log.i(TAG, "start rendering");
                    }
                    for (int k = 0; k < NUM_RENDERING; k++) {
                        for (int l = 0; l < numDisplays; l++) {
                            virtualDisplays[l].doRendering(COLOR_RED);
                        }
                        // do not care how many frames are actually rendered.
                        Thread.sleep(1);
                    }
                    for (int k = 0; k < numDisplays; k++) {
                        virtualDisplays[k].dismissPresentation();
                        virtualDisplays[k].destroyVirtualDisplay();
                    }
                    compositor.recreateWindows();
                }
                if (DBG) {
                    Log.i(TAG, "stop composition");
                }
                compositor.stopComposition();
            } finally {
                if (DBG) {
                    Log.i(TAG, "stop encoding");
                }
                encodingHelper.stopEncoding();
                // Each encoder instance must have produced at least config data and one frame.
                assertTrue(mCodecConfigReceived);
                assertTrue(mCodecBufferReceived);
            }
        }
    }

    /** Callbacks delivered from the encoding thread as encoder output becomes available. */
    interface EncoderEventListener {
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info);
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info);
        public void onError(String errorMessage);
    }

    /**
     * Runs a surface-input video encoder on its own thread and forwards output buffers to an
     * EncoderEventListener. startEncoding blocks until the encoder's input surface exists.
     */
    private class EncodingHelper {
        private MediaCodec mEncoder;
        // Polled by the encoding loop; set by stopEncoding to request shutdown.
        private volatile boolean mStopEncoding = false;
        private EncoderEventListener mEventListener;
        private String mMimeType;
        private int mW;
        private int mH;
        private Thread mEncodingThread;
        private Surface mEncodingSurface;
        // Released by the encoding thread once the input surface is created; startEncoding
        // blocks on it so callers never see a null surface.
        private Semaphore mInitCompleted = new Semaphore(0);

        /**
         * Spawns the encoding thread and blocks until the encoder is configured and its input
         * surface is available; returns that surface. Errors on the encoding thread are
         * reported through {@code eventListener.onError}.
         */
        Surface startEncoding(String mimeType, int w, int h, EncoderEventListener eventListener) {
            mStopEncoding = false;
            mMimeType = mimeType;
            mW = w;
            mH = h;
            mEventListener = eventListener;
            mEncodingThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        doEncoding();
                    } catch (Exception e) {
                        e.printStackTrace();
                        mEventListener.onError(e.toString());
                    }
                }
            });
            mEncodingThread.start();
            try {
                if (DBG) {
                    Log.i(TAG, "wait for encoder init");
                }
                // Wait for doEncoding to create the input surface before returning it.
                mInitCompleted.acquire();
                if (DBG) {
                    Log.i(TAG, "wait for encoder done");
                }
            } catch (InterruptedException e) {
                fail("should not happen");
            }
            return mEncodingSurface;
        }

        /** Signals the encoding loop to exit and waits for the encoding thread to finish. */
        void stopEncoding() {
            try {
                mStopEncoding = true;
                mEncodingThread.join();
            } catch(InterruptedException e) {
                // just ignore
            } finally {
                mEncodingThread = null;
            }
        }

        /**
         * Encoding thread body: configures a surface-input encoder, then drains its output
         * buffers until stopEncoding is requested or EOS is seen, dispatching each buffer to
         * the listener. Always stops/releases the codec and input surface on exit.
         */
        private void doEncoding() throws Exception {
            final int TIMEOUT_USEC_NORMAL = 1000000;
            MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mW, mH);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            // Pick a bitrate appropriate for the resolution (see BITRATE_* constants).
            int bitRate = BITRATE_DEFAULT;
            if (mW == 1920 && mH == 1080) {
                bitRate = BITRATE_1080p;
            } else if (mW == 1280 && mH == 720) {
                bitRate = BITRATE_720p;
            } else if (mW == 800 && mH == 480) {
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            // Color settings mirror the decoder configuration in doTestRenderingOutput.
            format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
            format.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_PAL);
            format.setInteger(MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);

            MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            String codecName = null;
            if ((codecName = mcl.findEncoderForFormat(format)) == null) {
                throw new RuntimeException("encoder "+ MIME_TYPE + " not support : " + format.toString());
            }

            try {
                mEncoder = MediaCodec.createByCodecName(codecName);
                mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                mEncodingSurface = mEncoder.createInputSurface();
                mEncoder.start();
                // Unblock startEncoding: the input surface is now valid.
                mInitCompleted.release();
                if (DBG) {
                    Log.i(TAG, "starting encoder");
                }
                ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                while (!mStopEncoding) {
                    int index = mEncoder.dequeueOutputBuffer(info, TIMEOUT_USEC_NORMAL);
                    if (DBG) {
                        Log.i(TAG, "encoder dequeOutputBuffer returned " + index);
                    }
                    if (index >= 0) {
                        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            Log.i(TAG, "codec config data");
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onCodecConfig(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        } else if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            Log.i(TAG, "EOS, stopping encoding");
                            break;
                        } else {
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onBufferReady(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        }
                    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED){
                        Log.i(TAG, "output buffer changed");
                        // Refresh the cached buffer array after the codec replaced its buffers.
                        encoderOutputBuffers = mEncoder.getOutputBuffers();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                throw e;
            } finally {
                if (mEncoder != null) {
                    mEncoder.stop();
                    mEncoder.release();
                    mEncoder = null;
                }
                if (mEncodingSurface != null) {
                    mEncodingSurface.release();
                    mEncodingSurface = null;
                }
            }
        }
    }

    /**
     * Handles composition of multiple SurfaceTexture into a single Surface
     */
    private static class GlCompositor implements SurfaceTexture.OnFrameAvailableListener {
        private final Context mContext;
        // Destination surface (the encoder's input surface) that composition renders into.
        private Surface mSurface;
        private int mWidth;
        private int mHeight;
        private volatile int mNumWindows;
        // Full-screen window backing the top presentation; sub-windows live inside it.
        private GlWindow mTopWindow;
        private Thread mCompositionThread;
        // Released once the composition thread finished GL init (see CompositionRunnable).
        private Semaphore mStartCompletionSemaphore;
        // Released when a DO_RECREATE_WINDOWS request has been processed.
        private Semaphore mRecreationCompletionSemaphore;
        private Looper mLooper;
        private Handler mHandler;
        private InputSurface mEglHelper;
        private int mGlProgramId = 0;
        private int mGluMVPMatrixHandle;
        private int mGluSTMatrixHandle;
        private int mGlaPositionHandle;
        private int mGlaTextureHandle;
        private float[] mMVPMatrix = new float[16];
        private TopWindowVirtualDisplayPresentation mTopPresentation;

        // Vertex shader: transforms positions by the MVP matrix and texture coords by the
        // SurfaceTexture's stream transform matrix.
        private static final String VERTEX_SHADER =
                "uniform mat4 uMVPMatrix;\n" +
                "uniform mat4 uSTMatrix;\n" +
                "attribute vec4 aPosition;\n" +
                "attribute vec4 aTextureCoord;\n" +
                "varying vec2 vTextureCoord;\n" +
                "void main() {\n" +
                "  gl_Position = uMVPMatrix * aPosition;\n" +
                "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                "}\n";

        // Fragment shader: samples the external (SurfaceTexture-backed) texture.
        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "varying vec2 vTextureCoord;\n" +
                "uniform samplerExternalOES sTexture;\n" +
                "void main() {\n" +
                "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                "}\n";

        public
        GlCompositor(Context context) {
            mContext = context;
        }

        /**
         * Starts the composition thread (its own Looper + EGL context rendering into
         * {@code surface}) and blocks until GL initialization completes.
         */
        void startComposition(Surface surface, int w, int h, int numWindows) throws Exception {
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mNumWindows = numWindows;
            mCompositionThread = new Thread(new CompositionRunnable());
            mStartCompletionSemaphore = new Semaphore(0);
            mCompositionThread.start();
            waitForStartCompletion();
        }

        /** Quits the composition looper and waits for the thread to exit, then drops refs. */
        void stopComposition() {
            try {
                if (mLooper != null) {
                    mLooper.quit();
                    mCompositionThread.join();
                }
            } catch (InterruptedException e) {
                // don't care
            }
            mCompositionThread = null;
            mSurface = null;
            mStartCompletionSemaphore = null;
        }

        /** Returns the client-facing surface of the given sub-window of the top presentation. */
        Surface getWindowSurface(int windowIndex) {
            return mTopPresentation.getSurface(windowIndex);
        }

        /**
         * Asks the composition thread to tear down and rebuild all windows, then blocks until
         * recreation finishes and the new surfaces are ready.
         */
        void recreateWindows() throws Exception {
            mRecreationCompletionSemaphore = new Semaphore(0);
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RECREATE_WINDOWS);
            mHandler.sendMessage(msg);
            if(!mRecreationCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("recreation timeout");
            }
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        // SurfaceTexture callback: a new frame arrived in the top window's texture; mark it
        // dirty and schedule a render pass on the composition thread.
        @Override
        public void onFrameAvailable(SurfaceTexture surface) {
            if (DBG) {
                Log.i(TAG, "onFrameAvailable " + surface);
            }
            GlWindow w = mTopWindow;
            if (w != null) {
                w.markTextureUpdated();
                requestUpdate();
            } else {
                Log.w(TAG, "top window gone");
            }
        }

        // Posts a DO_RENDERING message unless the composition thread is already gone.
        private void requestUpdate() {
            Thread compositionThread = mCompositionThread;
            if (compositionThread == null || !compositionThread.isAlive()) {
                return;
            }
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RENDERING);
            mHandler.sendMessage(msg);
        }

        /**
         * Compiles a shader of the given type. Returns 0 on compile failure (logged), which
         * createProgram treats as an error.
         */
        private int loadShader(int shaderType, String source) throws GlException {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
        }

        /** Compiles and links the GL program; returns 0 if any stage fails (logged). */
        private int createProgram(String vertexSource, String fragmentSource) throws GlException {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }

            int program = GLES20.glCreateProgram();
            checkGlError("glCreateProgram");
            if (program == 0) {
                Log.e(TAG, "Could not create program");
            }
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
            return program;
        }

        /**
         * Runs on the composition thread: creates the EGL context on the destination surface,
         * builds the shader program, sets up an orthographic projection mapping window pixel
         * coordinates to the viewport, and creates the top window + presentation.
         */
        private void initGl() throws GlException {
            mEglHelper = new InputSurface(mSurface);
            mEglHelper.makeCurrent();
            mGlProgramId = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            mGlaPositionHandle = GLES20.glGetAttribLocation(mGlProgramId, "aPosition");
            checkGlError("glGetAttribLocation aPosition");
            if (mGlaPositionHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aPosition");
            }
            mGlaTextureHandle = GLES20.glGetAttribLocation(mGlProgramId, "aTextureCoord");
            checkGlError("glGetAttribLocation aTextureCoord");
            if (mGlaTextureHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aTextureCoord");
            }
            mGluMVPMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uMVPMatrix");
            checkGlError("glGetUniformLocation uMVPMatrix");
            if (mGluMVPMatrixHandle == -1) {
                throw new RuntimeException("Could not get attrib location for uMVPMatrix");
            }
            mGluSTMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uSTMatrix");
            checkGlError("glGetUniformLocation uSTMatrix");
            if (mGluSTMatrixHandle == -1) {
                throw new RuntimeException("Could not get attrib location for uSTMatrix");
            }
            Matrix.setIdentityM(mMVPMatrix, 0);
            Log.i(TAG, "initGl w:" + mWidth + " h:" + mHeight);
            GLES20.glViewport(0, 0, mWidth, mHeight);
            float[] vMatrix = new float[16];
            float[] projMatrix = new float[16];
            // max window is from (0,0) to (mWidth - 1, mHeight - 1)
            float wMid = mWidth / 2f;
            float hMid = mHeight / 2f;
            // look from positive z to hide windows in lower z
            Matrix.setLookAtM(vMatrix, 0, wMid, hMid, 5f, wMid, hMid, 0f, 0f, 1.0f, 0.0f);
            Matrix.orthoM(projMatrix, 0, -wMid, wMid, -hMid, hMid, 1, 10);
            Matrix.multiplyMM(mMVPMatrix, 0, projMatrix, 0, vMatrix, 0);
            createWindows();

        }

        /**
         * Creates the full-screen top window and its virtual-display presentation, then lets
         * the presentation populate its sub-windows.
         */
        private void createWindows() throws GlException {
            mTopWindow = new GlWindow(this, 0, 0, mWidth, mHeight);
            mTopWindow.init();
            mTopPresentation = new TopWindowVirtualDisplayPresentation(mContext,
                    mTopWindow.getSurface(), mWidth, mHeight, mNumWindows);
            mTopPresentation.createVirtualDisplay();
            mTopPresentation.createPresentation();
            ((TopWindowPresentation) mTopPresentation.getPresentation()).populateWindows();
        }

        /** Reverse of initGl/createWindows: tears down presentation, window, and EGL. */
        private void cleanupGl() {
            if (mTopPresentation != null) {
                mTopPresentation.dismissPresentation();
                mTopPresentation.destroyVirtualDisplay();
                mTopPresentation = null;
            }
            if (mTopWindow != null) {
                mTopWindow.cleanup();
                mTopWindow = null;
            }
            if (mEglHelper != null) {
                mEglHelper.release();
                mEglHelper = null;
            }
        }

        /**
         * One composition pass on the composition thread: latch any pending frame, clear,
         * draw the top window, and swap to the destination surface.
         */
        private void doGlRendering() throws GlException {
            if (DBG) {
                Log.i(TAG, "doGlRendering");
            }
            mTopWindow.updateTexImageIfNecessary();
            GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

            GLES20.glUseProgram(mGlProgramId);
            GLES20.glUniformMatrix4fv(mGluMVPMatrixHandle, 1, false, mMVPMatrix, 0);
            mTopWindow.onDraw(mGluSTMatrixHandle, mGlaPositionHandle, mGlaTextureHandle);
            checkGlError("window draw");
            if (DBG) {
                final IntBuffer pixels = IntBuffer.allocate(1);
                GLES20.glReadPixels(mWidth / 2, mHeight / 2, 1, 1,
                        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
                Log.i(TAG, "glReadPixels returned 0x" + Integer.toHexString(pixels.get(0)));
            }
            mEglHelper.swapBuffers();
        }

        /** Handles DO_RECREATE_WINDOWS on the composition thread, then signals the waiter. */
        private void doRecreateWindows() throws GlException {
            mTopPresentation.dismissPresentation();
            mTopPresentation.destroyVirtualDisplay();
            mTopWindow.cleanup();
            createWindows();
            mRecreationCompletionSemaphore.release();
        }

        /** Blocks until the composition thread released the start semaphore and the top
         *  presentation's surfaces are ready; fails the test on timeout. */
        private void waitForStartCompletion() throws Exception {
            if (!mStartCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("start timeout");
            }
            mStartCompletionSemaphore = null;
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        private class CompositionRunnable
implements Runnable { 909 @Override run()910 public void run() { 911 try { 912 Looper.prepare(); 913 mLooper = Looper.myLooper(); 914 mHandler = new CompositionHandler(); 915 initGl(); 916 // init done 917 mStartCompletionSemaphore.release(); 918 Looper.loop(); 919 } catch (GlException e) { 920 e.printStackTrace(); 921 fail("got gl exception"); 922 } finally { 923 cleanupGl(); 924 mHandler = null; 925 mLooper = null; 926 } 927 } 928 } 929 930 private class CompositionHandler extends Handler { 931 private static final int DO_RENDERING = 1; 932 private static final int DO_RECREATE_WINDOWS = 2; 933 934 @Override handleMessage(Message msg)935 public void handleMessage(Message msg) { 936 try { 937 switch(msg.what) { 938 case DO_RENDERING: { 939 doGlRendering(); 940 } break; 941 case DO_RECREATE_WINDOWS: { 942 doRecreateWindows(); 943 } break; 944 } 945 } catch (GlException e) { 946 //ignore as this can happen during tearing down 947 } 948 } 949 } 950 951 private class GlWindow { 952 private static final int FLOAT_SIZE_BYTES = 4; 953 private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES; 954 private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0; 955 private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3; 956 private int mBlX; 957 private int mBlY; 958 private int mWidth; 959 private int mHeight; 960 private int mTextureId = 0; // 0 is invalid 961 private volatile SurfaceTexture mSurfaceTexture; 962 private volatile Surface mSurface; 963 private FloatBuffer mVerticesData; 964 private float[] mSTMatrix = new float[16]; 965 private AtomicInteger mNumTextureUpdated = new AtomicInteger(0); 966 private GlCompositor mCompositor; 967 968 /** 969 * @param blX X coordinate of bottom-left point of window 970 * @param blY Y coordinate of bottom-left point of window 971 * @param w window width 972 * @param h window height 973 */ GlWindow(GlCompositor compositor, int blX, int blY, int w, int h)974 public GlWindow(GlCompositor compositor, 
                    int blX, int blY, int w, int h) {
                mCompositor = compositor;
                mBlX = blX;
                mBlY = blY;
                mWidth = w;
                mHeight = h;
                int trX = blX + w;
                int trY = blY + h;
                // Two-triangle strip covering the window rectangle, with UVs.
                float[] vertices = new float[] {
                        // x, y, z, u, v
                        mBlX, mBlY, 0, 0, 0,
                        trX, mBlY, 0, 1, 0,
                        mBlX, trY, 0, 0, 1,
                        trX, trY, 0, 1, 1
                };
                Log.i(TAG, "create window " + this + " blX:" + mBlX + " blY:" + mBlY + " trX:" +
                        trX + " trY:" + trY);
                mVerticesData = ByteBuffer.allocateDirect(
                        vertices.length * FLOAT_SIZE_BYTES)
                                .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mVerticesData.put(vertices).position(0);
            }

            /**
             * initialize the window for composition. counter-part is cleanup().
             * Creates the external-OES texture, the SurfaceTexture wrapping it and
             * the producer Surface; registers the compositor as frame listener.
             * Must be called on the thread owning the GL context.
             * @throws GlException
             */
            public void init() throws GlException {
                int[] textures = new int[1];
                GLES20.glGenTextures(1, textures, 0);

                mTextureId = textures[0];
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                checkGlError("glBindTexture mTextureID");

                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                        GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                        GLES20.GL_CLAMP_TO_EDGE);
                checkGlError("glTexParameter");
                mSurfaceTexture = new SurfaceTexture(mTextureId);
                mSurfaceTexture.setDefaultBufferSize(mWidth, mHeight);
                mSurface = new Surface(mSurfaceTexture);
                mSurfaceTexture.setOnFrameAvailableListener(mCompositor);
            }

            /** Release texture, Surface and SurfaceTexture; counter-part of init(). */
            public void cleanup() {
                mNumTextureUpdated.set(0);
                if (mTextureId != 0) {
                    int[] textures = new int[] {
                            mTextureId
                    };
                    GLES20.glDeleteTextures(1, textures, 0);
                }
                GLES20.glFinish();
                if (mSurface != null) {
                    mSurface.release();
                    mSurface = null;
                }
                if (mSurfaceTexture != null) {
                    mSurfaceTexture.release();
                    mSurfaceTexture = null;
                }
            }

            /**
             * make texture as updated so that it can be updated in the next rendering.
             */
            public void markTextureUpdated() {
                mNumTextureUpdated.incrementAndGet();
            }

            /**
             * update texture for rendering if it is updated.
             */
            public void updateTexImageIfNecessary() {
                int numTextureUpdated = mNumTextureUpdated.getAndDecrement();
                if (numTextureUpdated > 0) {
                    if (DBG) {
                        Log.i(TAG, "updateTexImageIfNecessary " + this);
                    }
                    mSurfaceTexture.updateTexImage();
                    mSurfaceTexture.getTransformMatrix(mSTMatrix);
                }
                if (numTextureUpdated < 0) {
                    // Counter went negative: a decrement happened with no matching
                    // markTextureUpdated(), i.e. rendering raced ahead of the producer.
                    fail("should not happen");
                }
            }

            /**
             * draw the window. It will not be drawn at all if the window is not visible.
             * @param uSTMatrixHandle shader handler for the STMatrix for texture coordinates
             * mapping
             * @param aPositionHandle shader handle for vertex position.
             * @param aTextureHandle shader handle for texture
             */
            public void onDraw(int uSTMatrixHandle, int aPositionHandle, int aTextureHandle) {
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                mVerticesData.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
                GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aPositionHandle);

                mVerticesData.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
                GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aTextureHandle);
                GLES20.glUniformMatrix4fv(uSTMatrixHandle, 1, false, mSTMatrix, 0);
                GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            }

            public SurfaceTexture getSurfaceTexture() {
                return mSurfaceTexture;
            }

            public Surface getSurface() {
                return mSurface;
            }
        }
    }

    /**
     * Drain pending GL errors and throw a GlException for the first one found.
     * @param op label for the operation just performed, included in log/exception
     */
    static void checkGlError(String op) throws GlException {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new GlException(op + ": glError " + error);
        }
    }

    /** Checked exception wrapping a GL error code or EGL setup failure. */
    public static class GlException extends Exception {
        public GlException(String msg) {
            super(msg);
        }
    }

    /** Common interface for the different ways of rendering a solid color frame. */
    private interface Renderer {
        void doRendering(final int color) throws Exception;
    }

    /** Virtual-display presentation showing the four-quadrant rotation pattern. */
    private static class RotateVirtualDisplayPresentation extends VirtualDisplayPresentation {

        RotateVirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            super(context, surface, w, h);
        }

        @Override
        protected TestPresentationBase doCreatePresentation() {
            return new TestRotatePresentation(mContext, mVirtualDisplay.getDisplay());
        }

    }

    /**
     * Renders into the given Surface by showing a Presentation on a private
     * virtual display. All display/presentation operations are marshalled to the
     * main thread via runOnMainSync.
     */
    private static class VirtualDisplayPresentation implements Renderer {
        protected final Context mContext;
        protected final Surface mSurface;
        protected final int mWidth;
        protected final int mHeight;
        protected VirtualDisplay mVirtualDisplay;
        protected TestPresentationBase mPresentation;
        private final DisplayManager mDisplayManager;

        VirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mDisplayManager = (DisplayManager)context.getSystemService(Context.DISPLAY_SERVICE);
        }

        void createVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    // 200 dpi; OWN_CONTENT_ONLY keeps mirrored content off this display.
                    mVirtualDisplay = mDisplayManager.createVirtualDisplay(
                            TAG, mWidth, mHeight, 200, mSurface,
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY |
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION);
                }
            });
        }

        void destroyVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay.release();
                }
            });
        }

        void createPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation = doCreatePresentation();
                    mPresentation.show();
                }
            });
        }

        /** Factory hook; subclasses override to supply a different Presentation. */
        protected TestPresentationBase doCreatePresentation() {
            return new TestPresentation(mContext, mVirtualDisplay.getDisplay());
        }

        TestPresentationBase getPresentation() {
            return mPresentation;
        }

        void dismissPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.dismiss();
                }
            });
        }

        @Override
        public void doRendering(final int color) throws Exception {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.doRendering(color);
                }
            });
        }
    }

    /** Base Presentation with the window flags needed for off-screen rendering tests. */
    private static class TestPresentationBase extends Presentation {

        public TestPresentationBase(Context outerContext, Display display) {
            // This theme is required to prevent an extra view from obscuring the presentation
            super(outerContext, display,
                    android.R.style.Theme_Holo_Light_NoActionBar_TranslucentDecor);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_LOCAL_FOCUS_MODE);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
        }

        public void doRendering(int color) {
            // to be implemented by child
        }
    }

    /** Presentation that fills the display with a single solid color. */
    private static class TestPresentation extends TestPresentationBase {
        private ImageView mImageView;

        public TestPresentation(Context outerContext, Display display) {
            super(outerContext, display);
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            mImageView = new ImageView(getContext());
            mImageView.setImageDrawable(new ColorDrawable(COLOR_RED));
            mImageView.setLayoutParams(new LayoutParams(
                    LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
            setContentView(mImageView);
        }

color)1239 public void doRendering(int color) { 1240 if (DBG) { 1241 Log.i(TAG, "doRendering " + Integer.toHexString(color)); 1242 } 1243 mImageView.setImageDrawable(new ColorDrawable(color)); 1244 } 1245 } 1246 1247 private static class TestRotatePresentation extends TestPresentationBase { 1248 static final int[] kColors = new int[] {COLOR_GREY, COLOR_RED, COLOR_GREEN, COLOR_BLUE}; 1249 private final ImageView[] mQuadrants = new ImageView[4]; 1250 TestRotatePresentation(Context outerContext, Display display)1251 public TestRotatePresentation(Context outerContext, Display display) { 1252 super(outerContext, display); 1253 } 1254 1255 @Override onCreate(Bundle savedInstanceState)1256 protected void onCreate(Bundle savedInstanceState) { 1257 super.onCreate(savedInstanceState); 1258 Context ctx = getContext(); 1259 TableLayout table = new TableLayout(ctx); 1260 ViewGroup.LayoutParams fill = new ViewGroup.LayoutParams( 1261 ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT); 1262 TableLayout.LayoutParams fillTable = new TableLayout.LayoutParams( 1263 ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 1f); 1264 TableRow.LayoutParams fillRow = new TableRow.LayoutParams( 1265 ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 1f); 1266 table.setLayoutParams(fill); 1267 table.setStretchAllColumns(true); 1268 TableRow rows[] = new TableRow[] {new TableRow(ctx), new TableRow(ctx)}; 1269 for (int i = 0; i < mQuadrants.length; i++) { 1270 mQuadrants[i] = new ImageView(ctx); 1271 mQuadrants[i].setImageDrawable(new ColorDrawable(kColors[i])); 1272 rows[i / 2].addView(mQuadrants[i], fillRow); 1273 } 1274 for (TableRow row: rows) { 1275 table.addView(row, fillTable); 1276 } 1277 setContentView(table); 1278 Log.v(TAG, "setContentView(table)"); 1279 } 1280 1281 @Override doRendering(int color)1282 public void doRendering(int color) { 1283 Log.v(TAG, "doRendering: ignoring color: " + Integer.toHexString(color)); 
1284 for (int i = 0; i < mQuadrants.length; i++) { 1285 mQuadrants[i].setImageDrawable(new ColorDrawable(kColors[i])); 1286 } 1287 } 1288 1289 } 1290 1291 private static class TopWindowPresentation extends TestPresentationBase { 1292 private FrameLayout[] mWindowsLayout = new FrameLayout[MAX_NUM_WINDOWS]; 1293 private CompositionTextureView[] mWindows = new CompositionTextureView[MAX_NUM_WINDOWS]; 1294 private final int mNumWindows; 1295 private final Semaphore mWindowWaitSemaphore = new Semaphore(0); 1296 TopWindowPresentation(int numWindows, Context outerContext, Display display)1297 public TopWindowPresentation(int numWindows, Context outerContext, Display display) { 1298 super(outerContext, display); 1299 mNumWindows = numWindows; 1300 } 1301 1302 @Override onCreate(Bundle savedInstanceState)1303 protected void onCreate(Bundle savedInstanceState) { 1304 super.onCreate(savedInstanceState); 1305 if (DBG) { 1306 Log.i(TAG, "TopWindowPresentation onCreate, numWindows " + mNumWindows); 1307 } 1308 setContentView(R.layout.composition_layout); 1309 mWindowsLayout[0] = (FrameLayout) findViewById(R.id.window0); 1310 mWindowsLayout[1] = (FrameLayout) findViewById(R.id.window1); 1311 mWindowsLayout[2] = (FrameLayout) findViewById(R.id.window2); 1312 } 1313 populateWindows()1314 public void populateWindows() { 1315 runOnMain(new Runnable() { 1316 public void run() { 1317 for (int i = 0; i < mNumWindows; i++) { 1318 mWindows[i] = new CompositionTextureView(getContext()); 1319 mWindows[i].setLayoutParams(new ViewGroup.LayoutParams( 1320 ViewGroup.LayoutParams.MATCH_PARENT, 1321 ViewGroup.LayoutParams.MATCH_PARENT)); 1322 mWindowsLayout[i].setVisibility(View.VISIBLE); 1323 mWindowsLayout[i].addView(mWindows[i]); 1324 mWindows[i].startListening(); 1325 } 1326 mWindowWaitSemaphore.release(); 1327 } 1328 }); 1329 } 1330 waitForSurfaceReady(long timeoutMs)1331 public void waitForSurfaceReady(long timeoutMs) throws Exception { 1332 
mWindowWaitSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS); 1333 for (int i = 0; i < mNumWindows; i++) { 1334 if(!mWindows[i].waitForSurfaceReady(timeoutMs)) { 1335 fail("surface wait timeout"); 1336 } 1337 } 1338 } 1339 getSurface(int windowIndex)1340 public Surface getSurface(int windowIndex) { 1341 Surface surface = mWindows[windowIndex].getSurface(); 1342 assertNotNull(surface); 1343 return surface; 1344 } 1345 } 1346 1347 private static class TopWindowVirtualDisplayPresentation extends VirtualDisplayPresentation { 1348 private final int mNumWindows; 1349 TopWindowVirtualDisplayPresentation(Context context, Surface surface, int w, int h, int numWindows)1350 TopWindowVirtualDisplayPresentation(Context context, Surface surface, int w, int h, 1351 int numWindows) { 1352 super(context, surface, w, h); 1353 assertNotNull(surface); 1354 mNumWindows = numWindows; 1355 } 1356 waitForSurfaceReady(long timeoutMs)1357 void waitForSurfaceReady(long timeoutMs) throws Exception { 1358 ((TopWindowPresentation) mPresentation).waitForSurfaceReady(timeoutMs); 1359 } 1360 getSurface(int windowIndex)1361 Surface getSurface(int windowIndex) { 1362 return ((TopWindowPresentation) mPresentation).getSurface(windowIndex); 1363 } 1364 doCreatePresentation()1365 protected TestPresentationBase doCreatePresentation() { 1366 return new TopWindowPresentation(mNumWindows, mContext, mVirtualDisplay.getDisplay()); 1367 } 1368 } 1369 1370 private static class RemoteVirtualDisplayPresentation implements Renderer { 1371 /** argument: Surface, int w, int h, return none */ 1372 private static final int BINDER_CMD_START = IBinder.FIRST_CALL_TRANSACTION; 1373 /** argument: int color, return none */ 1374 private static final int BINDER_CMD_RENDER = IBinder.FIRST_CALL_TRANSACTION + 1; 1375 1376 private final Context mContext; 1377 private final Surface mSurface; 1378 private final int mWidth; 1379 private final int mHeight; 1380 1381 private IBinder mService; 1382 private final 
Semaphore mConnectionWait = new Semaphore(0);
        private final ServiceConnection mConnection = new ServiceConnection() {

            public void onServiceConnected(ComponentName arg0, IBinder arg1) {
                mService = arg1;
                mConnectionWait.release();
            }

            public void onServiceDisconnected(ComponentName arg0) {
                //ignore
            }

        };

        RemoteVirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
        }

        /** Bind to the remote service and wait for the connection, failing on timeout. */
        void connect() throws Exception {
            Intent intent = new Intent();
            intent.setClassName("android.media.cts",
                    "android.media.cts.RemoteVirtualDisplayService");
            mContext.bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
            if (!mConnectionWait.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
                fail("cannot bind to service");
            }
        }

        void disconnect() {
            mContext.unbindService(mConnection);
        }

        /** Ask the service to create its virtual display on our Surface. */
        void start() throws Exception {
            Parcel parcel = Parcel.obtain();
            try {
                mSurface.writeToParcel(parcel, 0);
                parcel.writeInt(mWidth);
                parcel.writeInt(mHeight);
                mService.transact(BINDER_CMD_START, parcel, null, 0);
            } finally {
                // Fix: obtained parcels must be returned to the pool.
                parcel.recycle();
            }
        }

        @Override
        public void doRendering(int color) throws Exception {
            Parcel parcel = Parcel.obtain();
            try {
                parcel.writeInt(color);
                mService.transact(BINDER_CMD_RENDER, parcel, null, 0);
            } finally {
                // Fix: obtained parcels must be returned to the pool.
                parcel.recycle();
            }
        }
    }

    /**
     * Probe standard resolutions from largest to smallest and return the first one
     * for which an AVC surface encoder exists, or null if none do.
     */
    private static Size getMaxSupportedEncoderSize() {
        final Size[] standardSizes = new Size[] {
            new Size(1920, 1080),
            new Size(1280, 720),
            new Size(720, 480),
            new Size(352, 576)
        };

        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (Size sz : standardSizes) {
            MediaFormat
                    format = MediaFormat.createVideoFormat(
                    MIME_TYPE, sz.getWidth(), sz.getHeight());
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            // Pick a bitrate appropriate to the resolution being probed.
            int bitRate = BITRATE_DEFAULT;
            if (sz.getWidth() == 1920 && sz.getHeight() == 1080) {
                bitRate = BITRATE_1080p;
            } else if (sz.getWidth() == 1280 && sz.getHeight() == 720) {
                bitRate = BITRATE_720p;
            } else if (sz.getWidth() == 800 && sz.getHeight() == 480) {
                // NOTE(review): 800x480 is not in standardSizes above, so this
                // branch appears unreachable here — confirm before relying on it.
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            Log.i(TAG,"format = " + format.toString());
            // Sizes are ordered largest-first, so the first hit is the maximum.
            if (mcl.findEncoderForFormat(format) != null) {
                return sz;
            }
        }
        return null;
    }

    /**
     * Check maximum concurrent encoding / decoding resolution allowed.
     * Some H/Ws cannot support maximum resolution reported in encoder if decoder is running
     * at the same time.
     * Check is done for 4 different levels: 1080p, 720p, 800x480, 480p
     * (The last one is required by CDD.)
     */
    Size checkMaxConcurrentEncodingDecodingResolution() {
        if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 1920, 1080, BITRATE_1080p)) {
            return new Size(1920, 1080);
        } else if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 1280, 720, BITRATE_720p)) {
            return new Size(1280, 720);
        } else if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 800, 480, BITRATE_800x480)) {
            return new Size(800, 480);
        } else if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 720, 480, BITRATE_DEFAULT)) {
            return new Size(720, 480);
        }
        Log.i(TAG, "SKIPPING test: concurrent encoding and decoding is not supported");
        return null;
    }

    /** Convenience overload: probe with the platform-default decoder. */
    boolean isConcurrentEncodingDecodingSupported(
            String mimeType, int w, int h, int bitRate) {
        return isConcurrentEncodingDecodingSupported(mimeType, w, h, bitRate, null);
    }

    /**
     * Try actually starting a decoder and an encoder at w x h simultaneously.
     * Returns false either when no codec advertises the format or when concurrent
     * start fails at runtime (some hardware cannot run both at full resolution).
     * @param decoderName specific decoder to use, or null for the default by type
     */
    boolean isConcurrentEncodingDecodingSupported(
            String mimeType, int w, int h, int bitRate, String decoderName) {
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        MediaFormat testFormat = MediaFormat.createVideoFormat(mimeType, w, h);
        testFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        testFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        // Cheap capability check before actually instantiating codecs.
        if (mcl.findDecoderForFormat(testFormat) == null
                || mcl.findEncoderForFormat(testFormat) == null) {
            return false;
        }

        MediaCodec decoder = null;
        OutputSurface decodingSurface = null;
        MediaCodec encoder = null;
        Surface encodingSurface = null;
        try {
            if (decoderName == null) {
                decoder = MediaCodec.createDecoderByType(mimeType);
            } else {
                decoder = MediaCodec.createByCodecName(decoderName);
            }
            MediaFormat
            decoderFormat = MediaFormat.createVideoFormat(mimeType, w, h);
            decodingSurface = new OutputSurface(w, h);
            decodingSurface.makeCurrent();
            decoder.configure(decoderFormat, decodingSurface.getSurface(), null, 0);
            decoder.start();

            MediaFormat format = MediaFormat.createVideoFormat(mimeType, w, h);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            encoder = MediaCodec.createEncoderByType(mimeType);
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encodingSurface = encoder.createInputSurface();
            encoder.start();

            // Both codecs started together successfully; stop them right away.
            encoder.stop();
            decoder.stop();
        } catch (Exception e) {
            // Failure here is the signal we are probing for, not a test error.
            e.printStackTrace();
            Log.i(TAG, "This H/W does not support w:" + w + " h:" + h);
            return false;
        } finally {
            if (encodingSurface != null) {
                encodingSurface.release();
            }
            if (encoder != null) {
                encoder.release();
            }
            if (decoder != null) {
                decoder.release();
            }
            if (decodingSurface != null) {
                decodingSurface.release();
            }
        }
        return true;
    }

    /** Post to the main thread without waiting for completion. */
    private static void runOnMain(Runnable runner) {
        sHandlerForRunOnMain.post(runner);
    }

    /** Post to the main thread and block the caller until the runnable finishes. */
    private static void runOnMainSync(Runnable runner) {
        SyncRunnable sr = new SyncRunnable(runner);
        sHandlerForRunOnMain.post(sr);
        sr.waitForComplete();
    }

    /** Runnable wrapper that lets another thread wait for run() to finish. */
    private static final class SyncRunnable implements Runnable {
        private final Runnable mTarget;
        private boolean mComplete; // guarded by this

        public SyncRunnable(Runnable target) {
            mTarget = target;
        }

        public void run() {
            mTarget.run();
            synchronized (this) {
                mComplete = true;
                notifyAll();
            }
        }

        public void waitForComplete() {
            synchronized (this) {
                // Loop to guard against spurious wakeups.
                while (!mComplete) {
                    try {
                        wait();
                    } catch (InterruptedException e) {
                        //ignore
                    }
                }
            }
        }
    }
}