/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.app.Presentation;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.graphics.SurfaceTexture;
import android.graphics.drawable.ColorDrawable;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.cts.R;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.os.Parcel;
import android.platform.test.annotations.RequiresDevice;
import android.util.Log;
import android.util.Size;
import android.view.Display;
import android.view.Surface;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TableLayout;
import android.widget.TableRow;

import androidx.test.filters.SmallTest;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Impl class for tests using MediaCodec encoding with composition of multiple virtual displays.
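 * <p>The test drives a GL compositor that renders one or more virtual display presentations
 * into a MediaCodec encoder input surface; the encoded stream is decoded again and the output
 * pixels are verified.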
 */
public class EncodeVirtualDisplayWithCompositionTestImpl {
    private static final String TAG = "EncodeVirtualDisplayWithCompositionTestImpl";
    private static final boolean DBG = false;
    private static final String MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;

    private static final long DEFAULT_WAIT_TIMEOUT_MS = 10000;
    private static final long DEFAULT_WAIT_TIMEOUT_US = DEFAULT_WAIT_TIMEOUT_MS * 1000;

    private static final int COLOR_RED = makeColor(100, 0, 0);
    private static final int COLOR_GREEN = makeColor(0, 100, 0);
    private static final int COLOR_BLUE = makeColor(0, 0, 100);
    private static final int COLOR_GREY = makeColor(100, 100, 100);

    static final int BITRATE_1080p = 20000000;
    static final int BITRATE_720p = 14000000;
    static final int BITRATE_800x480 = 14000000;
    static final int BITRATE_DEFAULT = 10000000;

    private static final int IFRAME_INTERVAL = 10;

    private static final int MAX_NUM_WINDOWS = 3;

    private static Handler sHandlerForRunOnMain = new Handler(Looper.getMainLooper());

    private Surface mEncodingSurface;
    private OutputSurface mDecodingSurface;
    private volatile boolean mCodecConfigReceived = false;
    private volatile boolean mCodecBufferReceived = false;
    private EncodingHelper mEncodingHelper;
    private MediaCodec mDecoder;
    private final ByteBuffer mPixelBuf = ByteBuffer.allocateDirect(4);
    private volatile boolean mIsQuitting = false;
    private Throwable mTestException;
    private VirtualDisplayPresentation mLocalPresentation;
    private RemoteVirtualDisplayPresentation mRemotePresentation;
    private ByteBuffer[] mDecoderInputBuffers;

    /** Event listener for tests that do not verify the encoded output. */
    private EncoderEventListener mEncoderEventListener = new EncoderEventListener() {
        @Override
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecConfigReceived = true;
        }
        @Override
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecBufferReceived = true;
        }
        @Override
        public void onError(String errorMessage) {
            fail(errorMessage);
        }
    };

    /* TEST_COLORS static initialization; need ARGB for ColorDrawable */
    private static int makeColor(int red, int green, int blue) {
        return 0xff << 24 | (red & 0xff) << 16 | (green & 0xff) << 8 | (blue & 0xff);
    }

    /**
     * Run rendering test in a separate thread. This is necessary as {@link OutputSurface} requires
     * constructing it in a non-test thread.
     * @param w video width
     * @param h video height
     * @throws Exception
     */
    void runTestRenderingInSeparateThread(final Context context, final String mimeType,
            final int w, final int h, final boolean runRemotely, final boolean multipleWindows)
            throws Throwable {
        runTestRenderingInSeparateThread(
                context, mimeType, w, h, runRemotely, multipleWindows, /* degrees */ 0, null);
    }

    void runTestRenderingInSeparateThread(final Context context, final String mimeType,
            final int w, final int h, final boolean runRemotely, final boolean multipleWindows,
            final int degrees, final String decoderName) throws Throwable {
        mTestException = null;
        Thread renderingThread = new Thread(new Runnable() {
            public void run() {
                try {
                    doTestRenderingOutput(
                            context, mimeType, w, h, runRemotely, multipleWindows,
                            degrees, decoderName);
                } catch (Throwable t) {
                    t.printStackTrace();
                    mTestException = t;
                }
            }
        });
        renderingThread.start();
        renderingThread.join(60000);
        assertTrue(!renderingThread.isAlive());
        if (mTestException != null) {
            throw mTestException;
        }
    }

    private void doTestRenderingOutput(final Context context, String mimeType, int w, int h,
            boolean runRemotely, boolean multipleWindows, int degrees,
            String decoderName) throws Throwable {
        if (DBG) {
            Log.i(TAG, "doTestRenderingOutput for type:" + mimeType + " w:" + w + " h:" + h);
        }
        try {
            mIsQuitting = false;
            if (decoderName == null) {
                mDecoder = MediaCodec.createDecoderByType(mimeType);
            } else {
                mDecoder = MediaCodec.createByCodecName(decoderName);
            }
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(mimeType, w, h);
            decoderFormat.setInteger(
                    MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
            decoderFormat.setInteger(
                    MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_PAL);
            decoderFormat.setInteger(
                    MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
            if (degrees != 0) {
                decoderFormat.setInteger(MediaFormat.KEY_ROTATION, degrees);
            }
            mDecodingSurface = new OutputSurface(w, h);
            mDecoder.configure(decoderFormat, mDecodingSurface.getSurface(), null, 0);
            // only scale to fit scaling mode is supported
            mDecoder.setVideoScalingMode(MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT);
            mDecoder.start();
            mDecoderInputBuffers = mDecoder.getInputBuffers();

            mEncodingHelper = new EncodingHelper();
            mEncodingSurface = mEncodingHelper.startEncoding(mimeType, w, h,
                    new EncoderEventListener() {
                @Override
                public void onCodecConfig(ByteBuffer data, BufferInfo info) {
                    if (DBG) {
                        Log.i(TAG, "onCodecConfig l:" + info.size);
                    }
                    handleEncodedData(data, info);
                }

                @Override
                public void onBufferReady(ByteBuffer data, BufferInfo info) {
                    if (DBG) {
                        Log.i(TAG, "onBufferReady l:" + info.size);
                    }
                    handleEncodedData(data, info);
                }

                @Override
                public void onError(String errorMessage) {
                    fail(errorMessage);
                }

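                // Feeds each encoded buffer directly into the decoder's input queue; the frame
                // is simply dropped when no decoder input buffer becomes available in time.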
                private void handleEncodedData(ByteBuffer data, BufferInfo info) {
                    if (mIsQuitting) {
                        if (DBG) {
                            Log.i(TAG, "ignore data as test is quitting");
                        }
                        return;
                    }
                    int inputBufferIndex = mDecoder.dequeueInputBuffer(DEFAULT_WAIT_TIMEOUT_US);
                    if (inputBufferIndex < 0) {
                        if (DBG) {
                            Log.i(TAG, "dequeueInputBuffer returned:" + inputBufferIndex);
                        }
                        return;
                    }
                    assertTrue(inputBufferIndex >= 0);
                    ByteBuffer inputBuffer = mDecoderInputBuffers[inputBufferIndex];
                    inputBuffer.clear();
                    inputBuffer.put(data);
                    mDecoder.queueInputBuffer(inputBufferIndex, 0, info.size,
                            info.presentationTimeUs, info.flags);
                }
            });
            GlCompositor compositor = new GlCompositor(context);
            if (DBG) {
                Log.i(TAG, "start composition");
            }
            compositor.startComposition(mEncodingSurface, w, h, multipleWindows ? 3 : 1);

            if (DBG) {
                Log.i(TAG, "create display");
            }

            Renderer renderer = null;
            Surface windowSurface = compositor.getWindowSurface(multipleWindows ? 1 : 0);
            if (runRemotely) {
                mRemotePresentation =
                        new RemoteVirtualDisplayPresentation(context, windowSurface, w, h);
                mRemotePresentation.connect();
                mRemotePresentation.start();
                renderer = mRemotePresentation;
            } else {
                mLocalPresentation = (degrees == 0)
                        ? new VirtualDisplayPresentation(context, windowSurface, w, h)
                        : new RotateVirtualDisplayPresentation(context, windowSurface, w, h);
                mLocalPresentation.createVirtualDisplay();
                mLocalPresentation.createPresentation();
                renderer = mLocalPresentation;
            }

            if (DBG) {
                Log.i(TAG, "start rendering and check");
            }
            if (degrees == 0) {
                renderColorAndCheckResult(renderer, w, h, COLOR_RED);
                renderColorAndCheckResult(renderer, w, h, COLOR_BLUE);
                renderColorAndCheckResult(renderer, w, h, COLOR_GREEN);
                renderColorAndCheckResult(renderer, w, h, COLOR_GREY);
            } else {
                renderRotationAndCheckResult(renderer, w, h, degrees);
            }

            mIsQuitting = true;
            if (runRemotely) {
                mRemotePresentation.disconnect();
            } else {
                mLocalPresentation.dismissPresentation();
                mLocalPresentation.destroyVirtualDisplay();
            }

            compositor.stopComposition();
        } finally {
            if (mEncodingHelper != null) {
                mEncodingHelper.stopEncoding();
                mEncodingHelper = null;
            }
            if (mDecoder != null) {
                mDecoder.stop();
                mDecoder.release();
                mDecoder = null;
            }
            if (mDecodingSurface != null) {
                mDecodingSurface.release();
                mDecodingSurface = null;
            }
        }
    }

    private static final int NUM_MAX_RETRY = 120;
    private static final int IMAGE_WAIT_TIMEOUT_MS = 1000;

    private void renderColorAndCheckResult(Renderer renderer, int w, int h,
            int color) throws Exception {
        BufferInfo info = new BufferInfo();
        for (int i = 0; i < NUM_MAX_RETRY; i++) {
            renderer.doRendering(color);
            int bufferIndex = mDecoder.dequeueOutputBuffer(info, DEFAULT_WAIT_TIMEOUT_US);
            if (DBG) {
                Log.i(TAG, "decoder dequeueOutputBuffer returned " + bufferIndex);
            }
            if (bufferIndex < 0) {
                continue;
            }
            mDecoder.releaseOutputBuffer(bufferIndex, true);
            if (mDecodingSurface.checkForNewImage(IMAGE_WAIT_TIMEOUT_MS)) {
                mDecodingSurface.drawImage();
                if (checkSurfaceFrameColor(w, h, color)) {
                    Log.i(TAG, "color " + Integer.toHexString(color) + " matched");
                    return;
                }
            } else if (DBG) {
                Log.i(TAG, "no rendering yet");
            }
        }
        fail("Color did not match");
    }

    private void renderRotationAndCheckResult(Renderer renderer, int w, int h,
            int degrees) throws Exception {
        BufferInfo info = new BufferInfo();
        for (int i = 0; i < NUM_MAX_RETRY; i++) {
            renderer.doRendering(-1);
            int bufferIndex = mDecoder.dequeueOutputBuffer(info, DEFAULT_WAIT_TIMEOUT_US);
            if (DBG) {
                Log.i(TAG, "decoder dequeueOutputBuffer returned " + bufferIndex);
            }
            if (bufferIndex < 0) {
                continue;
            }
            mDecoder.releaseOutputBuffer(bufferIndex, true);
            if (mDecodingSurface.checkForNewImage(IMAGE_WAIT_TIMEOUT_MS)) {
                mDecodingSurface.drawImage();
                if (checkRotatedFrameQuadrants(w, h, degrees)) {
                    Log.i(TAG, "output rotated " + degrees + " degrees");
                    return;
                }
            } else if (DBG) {
                Log.i(TAG, "no rendering yet");
            }
        }
        fail("Frame not properly rotated");
    }

    private boolean checkRotatedFrameQuadrants(int w, int h, int degrees) {
        // Read a pixel from each quadrant of the surface.
        int ww = w / 4;
        int hh = h / 4;
        // coords is ordered counter clockwise (note, gl 0,0 is bottom left)
        int[][] coords = new int[][] {{ww, hh}, {ww * 3, hh}, {ww * 3, hh * 3}, {ww, hh * 3}};
        List<Integer> expected = new ArrayList<>();
        List<Integer> colors = Arrays.asList(
                new Integer[] {COLOR_GREEN, COLOR_BLUE, COLOR_RED, COLOR_GREY});
        expected.addAll(colors);
        expected.addAll(colors);
        int offset = (degrees / 90) % 4;
        for (int i = 0; i < coords.length; i++) {
            int[] c = coords[i];
            int x = c[0];
            int y = c[1];
            GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
            int r = mPixelBuf.get(0) & 0xff;
            int g = mPixelBuf.get(1) & 0xff;
            int b = mPixelBuf.get(2) & 0xff;
            // adding the offset to rotate expected colors clockwise
            int color = expected.get(offset + i);
            int redExpected = (color >> 16) & 0xff;
            int greenExpected = (color >> 8) & 0xff;
            int blueExpected = color & 0xff;
            Log.i(TAG, String.format("(%d,%d) expecting %d,%d,%d saw %d,%d,%d",
                    x, y, redExpected, greenExpected, blueExpected, r, g, b));
            if (!approxEquals(redExpected, r) || !approxEquals(greenExpected, g)
                    || !approxEquals(blueExpected, b)) {
                return false;
            }
        }
        return true;
    }

    private boolean checkSurfaceFrameColor(int w, int h, int color) {
        // Read a pixel from the center of the surface. Might want to read from multiple points
        // and average them together.
        int x = w / 2;
        int y = h / 2;
        GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
        int r = mPixelBuf.get(0) & 0xff;
        int g = mPixelBuf.get(1) & 0xff;
        int b = mPixelBuf.get(2) & 0xff;

        int redExpected = (color >> 16) & 0xff;
        int greenExpected = (color >> 8) & 0xff;
        int blueExpected = color & 0xff;
        if (approxEquals(redExpected, r) && approxEquals(greenExpected, g)
                && approxEquals(blueExpected, b)) {
            return true;
        }
        Log.i(TAG, "expected 0x" + Integer.toHexString(color) + " got 0x"
                + Integer.toHexString(makeColor(r, g, b)));
        return false;
    }

    /**
     * Determines if two color values are approximately equal.
     */
    private static boolean approxEquals(int expected, int actual) {
        final int MAX_DELTA = 7;
        return Math.abs(expected - actual) <= MAX_DELTA;
    }

    private static final int NUM_CODEC_CREATION = 5;
    private static final int NUM_DISPLAY_CREATION = 10;
    private static final int NUM_RENDERING = 10;

    void doTestVirtualDisplayRecycles(final Context context, int numDisplays) throws Exception {
        Size maxSize = getMaxSupportedEncoderSize();
        if (maxSize == null) {
            Log.i(TAG, "no codec found, skipping");
            return;
        }
        VirtualDisplayPresentation[] virtualDisplays = new VirtualDisplayPresentation[numDisplays];
        for (int i = 0; i < NUM_CODEC_CREATION; i++) {
            mCodecConfigReceived = false;
            mCodecBufferReceived = false;
            if (DBG) {
                Log.i(TAG, "start encoding");
            }
            EncodingHelper encodingHelper = new EncodingHelper();
            try {
                mEncodingSurface = encodingHelper.startEncoding(
                        MIME_TYPE, maxSize.getWidth(), maxSize.getHeight(), mEncoderEventListener);
                GlCompositor compositor = new GlCompositor(context);
                if (DBG) {
                    Log.i(TAG, "start composition");
                }
                compositor.startComposition(mEncodingSurface,
                        maxSize.getWidth(), maxSize.getHeight(), numDisplays);
                for (int j = 0; j < NUM_DISPLAY_CREATION; j++) {
                    if (DBG) {
                        Log.i(TAG, "create display");
                    }
                    for (int k = 0; k < numDisplays; k++) {
                        virtualDisplays[k] =
                                new VirtualDisplayPresentation(context,
                                        compositor.getWindowSurface(k),
                                        maxSize.getWidth() / numDisplays, maxSize.getHeight());
                        virtualDisplays[k].createVirtualDisplay();
                        virtualDisplays[k].createPresentation();
                    }
                    if (DBG) {
                        Log.i(TAG, "start rendering");
                    }
                    for (int k = 0; k < NUM_RENDERING; k++) {
                        for (int l = 0; l < numDisplays; l++) {
                            virtualDisplays[l].doRendering(COLOR_RED);
                        }
                        // do not care how many frames are actually rendered.
                        Thread.sleep(1);
                    }
                    for (int k = 0; k < numDisplays; k++) {
                        virtualDisplays[k].dismissPresentation();
                        virtualDisplays[k].destroyVirtualDisplay();
                    }
                    compositor.recreateWindows();
                }
                if (DBG) {
                    Log.i(TAG, "stop composition");
                }
                compositor.stopComposition();
            } finally {
                if (DBG) {
                    Log.i(TAG, "stop encoding");
                }
                encodingHelper.stopEncoding();
                assertTrue(mCodecConfigReceived);
                assertTrue(mCodecBufferReceived);
            }
        }
    }

    interface EncoderEventListener {
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info);
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info);
        public void onError(String errorMessage);
    }

    private class EncodingHelper {
        private MediaCodec mEncoder;
        private volatile boolean mStopEncoding = false;
        private EncoderEventListener mEventListener;
        private String mMimeType;
        private int mW;
        private int mH;
        private Thread mEncodingThread;
        private Surface mEncodingSurface;
        private Semaphore mInitCompleted = new Semaphore(0);

        Surface startEncoding(String mimeType, int w, int h, EncoderEventListener eventListener) {
            mStopEncoding = false;
            mMimeType = mimeType;
            mW = w;
            mH = h;
            mEventListener = eventListener;
            mEncodingThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        doEncoding();
                    } catch (Exception e) {
                        e.printStackTrace();
                        mEventListener.onError(e.toString());
                    }
                }
            });
            mEncodingThread.start();
            try {
                if (DBG) {
                    Log.i(TAG, "wait for encoder init");
                }
                mInitCompleted.acquire();
                if (DBG) {
                    Log.i(TAG, "wait for encoder done");
                }
            } catch (InterruptedException e) {
                fail("should not happen");
            }
            return mEncodingSurface;
        }

        void stopEncoding() {
            try {
                mStopEncoding = true;
                mEncodingThread.join();
            } catch (InterruptedException e) {
                // just ignore
            } finally {
                mEncodingThread = null;
            }
        }

        private void doEncoding() throws Exception {
            final int TIMEOUT_USEC_NORMAL = 1000000;
            MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mW, mH);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            int bitRate = BITRATE_DEFAULT;
            if (mW == 1920 && mH == 1080) {
                bitRate = BITRATE_1080p;
            } else if (mW == 1280 && mH == 720) {
                bitRate = BITRATE_720p;
            } else if (mW == 800 && mH == 480) {
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
            format.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_PAL);
            format.setInteger(MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);

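            // Pick the encoder by capability query; if no codec advertises support for this
            // surface-input format, the test fails with the exception below.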
            MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            String codecName = null;
            if ((codecName = mcl.findEncoderForFormat(format)) == null) {
                throw new RuntimeException(
                        "encoder " + mMimeType + " not supported: " + format.toString());
            }

            try {
                mEncoder = MediaCodec.createByCodecName(codecName);
                mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                mEncodingSurface = mEncoder.createInputSurface();
                mEncoder.start();
                mInitCompleted.release();
                if (DBG) {
                    Log.i(TAG, "starting encoder");
                }
                ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                while (!mStopEncoding) {
                    int index = mEncoder.dequeueOutputBuffer(info, TIMEOUT_USEC_NORMAL);
                    if (DBG) {
                        Log.i(TAG, "encoder dequeueOutputBuffer returned " + index);
                    }
                    if (index >= 0) {
                        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            Log.i(TAG, "codec config data");
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onCodecConfig(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        } else if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            Log.i(TAG, "EOS, stopping encoding");
                            break;
                        } else {
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onBufferReady(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        }
                    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        Log.i(TAG, "output buffer changed");
                        encoderOutputBuffers = mEncoder.getOutputBuffers();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                throw e;
            } finally {
                if (mEncoder != null) {
                    mEncoder.stop();
                    mEncoder.release();
                    mEncoder = null;
                }
                if (mEncodingSurface != null) {
                    mEncodingSurface.release();
                    mEncodingSurface = null;
                }
            }
        }
    }

    /**
     * Handles composition of multiple SurfaceTextures into a single Surface.
     */
    private static class GlCompositor implements SurfaceTexture.OnFrameAvailableListener {
        private final Context mContext;
        private Surface mSurface;
        private int mWidth;
        private int mHeight;
        private volatile int mNumWindows;
        private GlWindow mTopWindow;
        private Thread mCompositionThread;
        private Semaphore mStartCompletionSemaphore;
        private Semaphore mRecreationCompletionSemaphore;
        private Looper mLooper;
        private Handler mHandler;
        private InputSurface mEglHelper;
        private int mGlProgramId = 0;
        private int mGluMVPMatrixHandle;
        private int mGluSTMatrixHandle;
        private int mGlaPositionHandle;
        private int mGlaTextureHandle;
        private float[] mMVPMatrix = new float[16];
        private TopWindowVirtualDisplayPresentation mTopPresentation;

        private static final String VERTEX_SHADER =
                "uniform mat4 uMVPMatrix;\n" +
                "uniform mat4 uSTMatrix;\n" +
                "attribute vec4 aPosition;\n" +
                "attribute vec4 aTextureCoord;\n" +
                "varying vec2 vTextureCoord;\n" +
                "void main() {\n" +
                " gl_Position = uMVPMatrix * aPosition;\n" +
                " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                "}\n";

        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "varying vec2 vTextureCoord;\n" +
                "uniform samplerExternalOES sTexture;\n" +
                "void main() {\n" +
                " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                "}\n";

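        // All EGL/GLES state below is created and used only on the composition thread started
        // by startComposition(); other threads interact with it by posting handler messages.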
        public GlCompositor(Context context) {
            mContext = context;
        }

        void startComposition(Surface surface, int w, int h, int numWindows) throws Exception {
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mNumWindows = numWindows;
            mCompositionThread = new Thread(new CompositionRunnable());
            mStartCompletionSemaphore = new Semaphore(0);
            mCompositionThread.start();
            waitForStartCompletion();
        }

        void stopComposition() {
            try {
                if (mLooper != null) {
                    mLooper.quit();
                    mCompositionThread.join();
                }
            } catch (InterruptedException e) {
                // don't care
            }
            mCompositionThread = null;
            mSurface = null;
            mStartCompletionSemaphore = null;
        }

        Surface getWindowSurface(int windowIndex) {
            return mTopPresentation.getSurface(windowIndex);
        }

        void recreateWindows() throws Exception {
            mRecreationCompletionSemaphore = new Semaphore(0);
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RECREATE_WINDOWS);
            mHandler.sendMessage(msg);
            if (!mRecreationCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("recreation timeout");
            }
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        @Override
        public void onFrameAvailable(SurfaceTexture surface) {
            if (DBG) {
                Log.i(TAG, "onFrameAvailable " + surface);
            }
            GlWindow w = mTopWindow;
            if (w != null) {
                w.markTextureUpdated();
                requestUpdate();
            } else {
                Log.w(TAG, "top window gone");
            }
        }

        private void requestUpdate() {
            Thread compositionThread = mCompositionThread;
            if (compositionThread == null || !compositionThread.isAlive()) {
                return;
            }
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RENDERING);
            mHandler.sendMessage(msg);
        }

        private int loadShader(int shaderType, String source) throws GlException {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
        }

        private int createProgram(String vertexSource, String fragmentSource) throws GlException {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }

            int program = GLES20.glCreateProgram();
            checkGlError("glCreateProgram");
            if (program == 0) {
                Log.e(TAG, "Could not create program");
            }
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
            return program;
        }

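        // Sets up an orthographic projection that maps window coordinates in the range
        // (0,0)-(mWidth,mHeight) onto the output surface, so GlWindow quads can be positioned
        // in pixel coordinates.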
        private void initGl() throws GlException {
            mEglHelper = new InputSurface(mSurface);
            mEglHelper.makeCurrent();
            mGlProgramId = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            mGlaPositionHandle = GLES20.glGetAttribLocation(mGlProgramId, "aPosition");
            checkGlError("glGetAttribLocation aPosition");
            if (mGlaPositionHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aPosition");
            }
            mGlaTextureHandle = GLES20.glGetAttribLocation(mGlProgramId, "aTextureCoord");
            checkGlError("glGetAttribLocation aTextureCoord");
            if (mGlaTextureHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aTextureCoord");
            }
            mGluMVPMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uMVPMatrix");
            checkGlError("glGetUniformLocation uMVPMatrix");
            if (mGluMVPMatrixHandle == -1) {
                throw new RuntimeException("Could not get attrib location for uMVPMatrix");
            }
            mGluSTMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uSTMatrix");
            checkGlError("glGetUniformLocation uSTMatrix");
            if (mGluSTMatrixHandle == -1) {
                throw new RuntimeException("Could not get attrib location for uSTMatrix");
            }
            Matrix.setIdentityM(mMVPMatrix, 0);
            Log.i(TAG, "initGl w:" + mWidth + " h:" + mHeight);
            GLES20.glViewport(0, 0, mWidth, mHeight);
            float[] vMatrix = new float[16];
            float[] projMatrix = new float[16];
            // max window is from (0,0) to (mWidth - 1, mHeight - 1)
            float wMid = mWidth / 2f;
            float hMid = mHeight / 2f;
            // look from positive z to hide windows in lower z
            Matrix.setLookAtM(vMatrix, 0, wMid, hMid, 5f, wMid, hMid, 0f, 0f, 1.0f, 0.0f);
            Matrix.orthoM(projMatrix, 0, -wMid, wMid, -hMid, hMid, 1, 10);
            Matrix.multiplyMM(mMVPMatrix, 0, projMatrix, 0, vMatrix, 0);
            createWindows();
        }

        private void createWindows() throws GlException {
            mTopWindow = new GlWindow(this, 0, 0, mWidth, mHeight);
            mTopWindow.init();
            mTopPresentation = new TopWindowVirtualDisplayPresentation(mContext,
                    mTopWindow.getSurface(), mWidth, mHeight, mNumWindows);
            mTopPresentation.createVirtualDisplay();
            mTopPresentation.createPresentation();
            ((TopWindowPresentation) mTopPresentation.getPresentation()).populateWindows();
        }

        private void cleanupGl() {
            if (mTopPresentation != null) {
                mTopPresentation.dismissPresentation();
                mTopPresentation.destroyVirtualDisplay();
                mTopPresentation = null;
            }
            if (mTopWindow != null) {
                mTopWindow.cleanup();
                mTopWindow = null;
            }
            if (mEglHelper != null) {
                mEglHelper.release();
                mEglHelper = null;
            }
        }

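        // One composition pass: latch any pending SurfaceTexture frames, clear the target,
        // draw the top window as a full-screen external-texture quad, then swap buffers into
        // the encoder's input surface.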
        private void doGlRendering() throws GlException {
            if (DBG) {
                Log.i(TAG, "doGlRendering");
            }
            mTopWindow.updateTexImageIfNecessary();
            GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

            GLES20.glUseProgram(mGlProgramId);
            GLES20.glUniformMatrix4fv(mGluMVPMatrixHandle, 1, false, mMVPMatrix, 0);
            mTopWindow.onDraw(mGluSTMatrixHandle, mGlaPositionHandle, mGlaTextureHandle);
            checkGlError("window draw");
            if (DBG) {
                final IntBuffer pixels = IntBuffer.allocate(1);
                GLES20.glReadPixels(mWidth / 2, mHeight / 2, 1, 1,
                        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
                Log.i(TAG, "glReadPixels returned 0x" + Integer.toHexString(pixels.get(0)));
            }
            mEglHelper.swapBuffers();
        }

        private void doRecreateWindows() throws GlException {
            mTopPresentation.dismissPresentation();
            mTopPresentation.destroyVirtualDisplay();
            mTopWindow.cleanup();
            createWindows();
            mRecreationCompletionSemaphore.release();
        }

        private void waitForStartCompletion() throws Exception {
            if (!mStartCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("start timeout");
            }
            mStartCompletionSemaphore = null;
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        private class CompositionRunnable implements Runnable {
            @Override
            public void run() {
                try {
                    Looper.prepare();
                    mLooper = Looper.myLooper();
                    mHandler = new CompositionHandler();
                    initGl();
                    // init done
                    mStartCompletionSemaphore.release();
                    Looper.loop();
                } catch (GlException e) {
                    e.printStackTrace();
                    fail("got gl exception");
                } finally {
                    cleanupGl();
                    mHandler = null;
                    mLooper = null;
                }
            }
        }

        private class CompositionHandler extends Handler {
            private static final int DO_RENDERING = 1;
            private static final int DO_RECREATE_WINDOWS = 2;

            @Override
            public void handleMessage(Message msg) {
                try {
                    switch (msg.what) {
                        case DO_RENDERING: {
                            doGlRendering();
                        } break;
                        case DO_RECREATE_WINDOWS: {
                            doRecreateWindows();
                        } break;
                    }
                } catch (GlException e) {
                    // ignore as this can happen during tearing down
                }
            }
        }

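        /**
         * A window quad backed by a SurfaceTexture. The Surface returned by {@link #getSurface()}
         * is composited as an external texture at the window's position during each render pass.
         */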
        private class GlWindow {
            private static final int FLOAT_SIZE_BYTES = 4;
            private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
            private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
            private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
            private int mBlX;
            private int mBlY;
            private int mWidth;
            private int mHeight;
            private int mTextureId = 0; // 0 is invalid
            private volatile SurfaceTexture mSurfaceTexture;
            private volatile Surface mSurface;
            private FloatBuffer mVerticesData;
            private float[] mSTMatrix = new float[16];
            private AtomicInteger mNumTextureUpdated = new AtomicInteger(0);
            private GlCompositor mCompositor;

            /**
             * @param blX X coordinate of bottom-left point of window
             * @param blY Y coordinate of bottom-left point of window
             * @param w window width
             * @param h window height
             */
            public GlWindow(GlCompositor compositor, int blX, int blY, int w, int h) {
                mCompositor = compositor;
                mBlX = blX;
                mBlY = blY;
                mWidth = w;
                mHeight = h;
                int trX = blX + w;
                int trY = blY + h;
                float[] vertices = new float[] {
                        // x, y, z, u, v
                        mBlX, mBlY, 0, 0, 0,
                        trX, mBlY, 0, 1, 0,
                        mBlX, trY, 0, 0, 1,
                        trX, trY, 0, 1, 1
                };
                Log.i(TAG, "create window " + this + " blX:" + mBlX + " blY:" + mBlY + " trX:" +
                        trX + " trY:" + trY);
                mVerticesData = ByteBuffer.allocateDirect(
                        vertices.length * FLOAT_SIZE_BYTES)
                        .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mVerticesData.put(vertices).position(0);
            }

            /**
             * Initializes the window for composition. The counterpart is {@link #cleanup()}.
             * @throws GlException
             */
            public void init() throws GlException {
                int[] textures = new int[1];
                GLES20.glGenTextures(1, textures, 0);

                mTextureId = textures[0];
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                checkGlError("glBindTexture mTextureID");

                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                        GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                        GLES20.GL_CLAMP_TO_EDGE);
                checkGlError("glTexParameter");
                mSurfaceTexture = new SurfaceTexture(mTextureId);
                mSurfaceTexture.setDefaultBufferSize(mWidth, mHeight);
                mSurface = new Surface(mSurfaceTexture);
                mSurfaceTexture.setOnFrameAvailableListener(mCompositor);
            }

            public void cleanup() {
                mNumTextureUpdated.set(0);
                if (mTextureId != 0) {
                    int[] textures = new int[] {
                            mTextureId
                    };
                    GLES20.glDeleteTextures(1, textures, 0);
                }
                GLES20.glFinish();
                if (mSurface != null) {
                    mSurface.release();
                    mSurface = null;
                }
                if (mSurfaceTexture != null) {
                    mSurfaceTexture.release();
                    mSurfaceTexture = null;
                }
            }

            /**
             * Marks the texture as updated so that it is refreshed in the next rendering pass.
             */
            public void markTextureUpdated() {
                mNumTextureUpdated.incrementAndGet();
            }

            /**
             * Updates the texture for rendering if new frames are pending.
             */
            public void updateTexImageIfNecessary() {
                int numTextureUpdated = mNumTextureUpdated.getAndDecrement();
                if (numTextureUpdated > 0) {
                    if (DBG) {
                        Log.i(TAG, "updateTexImageIfNecessary " + this);
                    }
                    mSurfaceTexture.updateTexImage();
                    mSurfaceTexture.getTransformMatrix(mSTMatrix);
                }
                if (numTextureUpdated < 0) {
                    fail("should not happen");
                }
            }

            /**
             * Draws the window. It will not be drawn at all if the window is not visible.
             * @param uSTMatrixHandle shader handle for the STMatrix for texture coordinates
             *        mapping
             * @param aPositionHandle shader handle for vertex position.
             * @param aTextureHandle shader handle for texture
             */
            public void onDraw(int uSTMatrixHandle, int aPositionHandle, int aTextureHandle) {
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                mVerticesData.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
                GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aPositionHandle);

                mVerticesData.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
                GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aTextureHandle);
                GLES20.glUniformMatrix4fv(uSTMatrixHandle, 1, false, mSTMatrix, 0);
                GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            }

            public SurfaceTexture getSurfaceTexture() {
                return mSurfaceTexture;
            }

            public Surface getSurface() {
                return mSurface;
            }
        }
    }

    static void checkGlError(String op) throws GlException {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new GlException(op + ": glError " + error);
        }
    }

    public static class GlException extends Exception {
        public GlException(String msg) {
            super(msg);
        }
    }

    private interface Renderer {
        void doRendering(final int color) throws Exception;
    }

    private static class RotateVirtualDisplayPresentation extends VirtualDisplayPresentation {

        RotateVirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            super(context, surface, w, h);
        }

        @Override
        protected TestPresentationBase doCreatePresentation() {
            return new TestRotatePresentation(mContext, mVirtualDisplay.getDisplay());
        }

    }

    private static class VirtualDisplayPresentation implements Renderer {
        protected final Context mContext;
        protected final Surface mSurface;
        protected final int mWidth;
        protected final int mHeight;
        protected VirtualDisplay mVirtualDisplay;
        protected TestPresentationBase mPresentation;
        private final DisplayManager mDisplayManager;

        VirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mDisplayManager = (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE);
        }

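        // Display creation and presentation lifecycle run on the main thread via runOnMainSync;
        // the virtual display uses a fixed 200 dpi and the OWN_CONTENT_ONLY | PRESENTATION flags.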
        void createVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay = mDisplayManager.createVirtualDisplay(
                            TAG, mWidth, mHeight, 200, mSurface,
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY |
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION);
                }
            });
        }

        void destroyVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay.release();
                }
            });
        }

        void createPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation = doCreatePresentation();
                    mPresentation.show();
                }
            });
        }

        protected TestPresentationBase doCreatePresentation() {
            return new TestPresentation(mContext, mVirtualDisplay.getDisplay());
        }

        TestPresentationBase getPresentation() {
            return mPresentation;
        }

        void dismissPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.dismiss();
                }
            });
        }

        @Override
        public void doRendering(final int color) throws Exception {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.doRendering(color);
                }
            });
        }
    }

    private static class TestPresentationBase extends Presentation {

        public TestPresentationBase(Context outerContext, Display display) {
            // This theme is required to prevent an extra view from obscuring the presentation
            super(outerContext, display,
                    android.R.style.Theme_Holo_Light_NoActionBar_TranslucentDecor);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_LOCAL_FOCUS_MODE);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
        }

        public void doRendering(int color) {
            // to be implemented by child
        }
    }

    private static class TestPresentation extends TestPresentationBase {
        private ImageView mImageView;

        public TestPresentation(Context outerContext, Display display) {
            super(outerContext, display);
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            mImageView = new ImageView(getContext());
            mImageView.setImageDrawable(new ColorDrawable(COLOR_RED));
            mImageView.setLayoutParams(new LayoutParams(
                    LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
            setContentView(mImageView);
        }

        public void doRendering(int color) {
            if (DBG) {
                Log.i(TAG, "doRendering " + Integer.toHexString(color));
            }
            mImageView.setImageDrawable(new ColorDrawable(color));
        }
    }

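    /**
     * Presentation that fills the display with a 2x2 grid of solid colors (grey, red, green,
     * blue). checkRotatedFrameQuadrants() samples one pixel per quadrant of the decoded frame
     * to verify that the content was rotated as requested.
     */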
    private static class TestRotatePresentation extends TestPresentationBase {
        static final int[] kColors = new int[] {COLOR_GREY, COLOR_RED, COLOR_GREEN, COLOR_BLUE};
        private final ImageView[] mQuadrants = new ImageView[4];

        public TestRotatePresentation(Context outerContext, Display display) {
            super(outerContext, display);
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            Context ctx = getContext();
            TableLayout table = new TableLayout(ctx);
            ViewGroup.LayoutParams fill = new ViewGroup.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
            TableLayout.LayoutParams fillTable = new TableLayout.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 1f);
            TableRow.LayoutParams fillRow = new TableRow.LayoutParams(
                    ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT, 1f);
            table.setLayoutParams(fill);
            table.setStretchAllColumns(true);
            TableRow[] rows = new TableRow[] {new TableRow(ctx), new TableRow(ctx)};
            for (int i = 0; i < mQuadrants.length; i++) {
                mQuadrants[i] = new ImageView(ctx);
                mQuadrants[i].setImageDrawable(new ColorDrawable(kColors[i]));
                rows[i / 2].addView(mQuadrants[i], fillRow);
            }
            for (TableRow row : rows) {
                table.addView(row, fillTable);
            }
            setContentView(table);
            Log.v(TAG, "setContentView(table)");
        }

        @Override
        public void doRendering(int color) {
            Log.v(TAG, "doRendering: ignoring color: " + Integer.toHexString(color));
            for (int i = 0; i < mQuadrants.length; i++) {
                mQuadrants[i].setImageDrawable(new ColorDrawable(kColors[i]));
            }
        }

    }

    private static class TopWindowPresentation extends TestPresentationBase {
        private FrameLayout[] mWindowsLayout = new FrameLayout[MAX_NUM_WINDOWS];
        private CompositionTextureView[] mWindows = new CompositionTextureView[MAX_NUM_WINDOWS];
        private final int mNumWindows;
        private final Semaphore mWindowWaitSemaphore = new Semaphore(0);

        public TopWindowPresentation(int numWindows, Context outerContext, Display display) {
            super(outerContext, display);
            mNumWindows = numWindows;
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            if (DBG) {
                Log.i(TAG, "TopWindowPresentation onCreate, numWindows " + mNumWindows);
            }
            setContentView(R.layout.composition_layout);
            mWindowsLayout[0] = (FrameLayout) findViewById(R.id.window0);
            mWindowsLayout[1] = (FrameLayout) findViewById(R.id.window1);
            mWindowsLayout[2] = (FrameLayout) findViewById(R.id.window2);
        }

        public void populateWindows() {
            runOnMain(new Runnable() {
                public void run() {
                    for (int i = 0; i < mNumWindows; i++) {
                        mWindows[i] = new CompositionTextureView(getContext());
                        mWindows[i].setLayoutParams(new ViewGroup.LayoutParams(
                                ViewGroup.LayoutParams.MATCH_PARENT,
                                ViewGroup.LayoutParams.MATCH_PARENT));
                        mWindowsLayout[i].setVisibility(View.VISIBLE);
                        mWindowsLayout[i].addView(mWindows[i]);
                        mWindows[i].startListening();
                    }
                    mWindowWaitSemaphore.release();
                }
            });
        }

        public void waitForSurfaceReady(long timeoutMs) throws Exception {
            mWindowWaitSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
            for (int i = 0; i < mNumWindows; i++) {
                if (!mWindows[i].waitForSurfaceReady(timeoutMs)) {
                    fail("surface wait timeout");
                }
            }
        }

        public Surface getSurface(int windowIndex) {
            Surface surface = mWindows[windowIndex].getSurface();
            assertNotNull(surface);
            return surface;
        }
    }

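    /**
     * Hosts {@link TopWindowPresentation} on its own virtual display and hands out the
     * per-window Surfaces that the individual test presentations render into.
     */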
    private static class TopWindowVirtualDisplayPresentation extends VirtualDisplayPresentation {
        private final int mNumWindows;

        TopWindowVirtualDisplayPresentation(Context context, Surface surface, int w, int h,
                int numWindows) {
            super(context, surface, w, h);
            assertNotNull(surface);
            mNumWindows = numWindows;
        }

        void waitForSurfaceReady(long timeoutMs) throws Exception {
            ((TopWindowPresentation) mPresentation).waitForSurfaceReady(timeoutMs);
        }

        Surface getSurface(int windowIndex) {
            return ((TopWindowPresentation) mPresentation).getSurface(windowIndex);
        }

        protected TestPresentationBase doCreatePresentation() {
            return new TopWindowPresentation(mNumWindows, mContext, mVirtualDisplay.getDisplay());
        }
    }

    private static class RemoteVirtualDisplayPresentation implements Renderer {
        /** argument: Surface, int w, int h, return none */
        private static final int BINDER_CMD_START = IBinder.FIRST_CALL_TRANSACTION;
        /** argument: int color, return none */
        private static final int BINDER_CMD_RENDER = IBinder.FIRST_CALL_TRANSACTION + 1;

        private final Context mContext;
        private final Surface mSurface;
        private final int mWidth;
        private final int mHeight;

        private IBinder mService;
        private final Semaphore mConnectionWait = new Semaphore(0);
        private final ServiceConnection mConnection = new ServiceConnection() {

            public void onServiceConnected(ComponentName arg0, IBinder arg1) {
                mService = arg1;
                mConnectionWait.release();
            }

            public void onServiceDisconnected(ComponentName arg0) {
                // ignore
            }

        };

        RemoteVirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
        }

        void connect() throws Exception {
            Intent intent = new Intent();
            intent.setClassName("android.media.cts",
                    "android.media.cts.RemoteVirtualDisplayService");
            mContext.bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
            if (!mConnectionWait.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
                fail("cannot bind to service");
            }
        }

        void disconnect() {
            mContext.unbindService(mConnection);
        }

        void start() throws Exception {
            Parcel parcel = Parcel.obtain();
            mSurface.writeToParcel(parcel, 0);
            parcel.writeInt(mWidth);
            parcel.writeInt(mHeight);
            mService.transact(BINDER_CMD_START, parcel, null, 0);
        }

        @Override
        public void doRendering(int color) throws Exception {
            Parcel parcel = Parcel.obtain();
            parcel.writeInt(color);
            mService.transact(BINDER_CMD_RENDER, parcel, null, 0);
        }
    }

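    // Probes a few standard sizes from largest to smallest and returns the first one for which
    // an AVC encoder is available, or null when no encoder is found.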
    private static Size getMaxSupportedEncoderSize() {
        final Size[] standardSizes = new Size[] {
                new Size(1920, 1080),
                new Size(1280, 720),
                new Size(720, 480),
                new Size(352, 576)
        };

        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (Size sz : standardSizes) {
            MediaFormat format = MediaFormat.createVideoFormat(
                    MIME_TYPE, sz.getWidth(), sz.getHeight());
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            int bitRate = BITRATE_DEFAULT;
            if (sz.getWidth() == 1920 && sz.getHeight() == 1080) {
                bitRate = BITRATE_1080p;
            } else if (sz.getWidth() == 1280 && sz.getHeight() == 720) {
                bitRate = BITRATE_720p;
            } else if (sz.getWidth() == 800 && sz.getHeight() == 480) {
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            Log.i(TAG, "format = " + format.toString());
            if (mcl.findEncoderForFormat(format) != null) {
                return sz;
            }
        }
        return null;
    }

    /**
     * Checks the maximum concurrent encoding / decoding resolution allowed.
     * Some hardware cannot support its maximum reported encoder resolution while a decoder is
     * running at the same time.
     * The check is done for four levels: 1080p, 720p, 800x480 and 480p
     * (the last one is required by the CDD).
     */
    Size checkMaxConcurrentEncodingDecodingResolution() {
        if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 1920, 1080, BITRATE_1080p)) {
            return new Size(1920, 1080);
        } else if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 1280, 720, BITRATE_720p)) {
            return new Size(1280, 720);
        } else if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 800, 480, BITRATE_800x480)) {
            return new Size(800, 480);
        } else if (isConcurrentEncodingDecodingSupported(MIME_TYPE, 720, 480, BITRATE_DEFAULT)) {
            return new Size(720, 480);
        }
        Log.i(TAG, "SKIPPING test: concurrent encoding and decoding is not supported");
        return null;
    }

    boolean isConcurrentEncodingDecodingSupported(
            String mimeType, int w, int h, int bitRate) {
        return isConcurrentEncodingDecodingSupported(mimeType, w, h, bitRate, null);
    }

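    // This check goes beyond querying MediaCodecList: it actually configures and starts a
    // decoder and an encoder together at the requested size, then releases both.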
    boolean isConcurrentEncodingDecodingSupported(
            String mimeType, int w, int h, int bitRate, String decoderName) {
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        MediaFormat testFormat = MediaFormat.createVideoFormat(mimeType, w, h);
        testFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        testFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        if (mcl.findDecoderForFormat(testFormat) == null
                || mcl.findEncoderForFormat(testFormat) == null) {
            return false;
        }

        MediaCodec decoder = null;
        OutputSurface decodingSurface = null;
        MediaCodec encoder = null;
        Surface encodingSurface = null;
        try {
            if (decoderName == null) {
                decoder = MediaCodec.createDecoderByType(mimeType);
            } else {
                decoder = MediaCodec.createByCodecName(decoderName);
            }
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(mimeType, w, h);
            decodingSurface = new OutputSurface(w, h);
            decodingSurface.makeCurrent();
            decoder.configure(decoderFormat, decodingSurface.getSurface(), null, 0);
            decoder.start();

            MediaFormat format = MediaFormat.createVideoFormat(mimeType, w, h);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            encoder = MediaCodec.createEncoderByType(mimeType);
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encodingSurface = encoder.createInputSurface();
            encoder.start();

            encoder.stop();
            decoder.stop();
        } catch (Exception e) {
            e.printStackTrace();
            Log.i(TAG, "This H/W does not support w:" + w + " h:" + h);
            return false;
        } finally {
            if (encodingSurface != null) {
                encodingSurface.release();
            }
            if (encoder != null) {
                encoder.release();
            }
            if (decoder != null) {
                decoder.release();
            }
            if (decodingSurface != null) {
                decodingSurface.release();
            }
        }
        return true;
    }

    private static void runOnMain(Runnable runner) {
        sHandlerForRunOnMain.post(runner);
    }

    private static void runOnMainSync(Runnable runner) {
        SyncRunnable sr = new SyncRunnable(runner);
        sHandlerForRunOnMain.post(sr);
        sr.waitForComplete();
    }

    private static final class SyncRunnable implements Runnable {
        private final Runnable mTarget;
        private boolean mComplete;

        public SyncRunnable(Runnable target) {
            mTarget = target;
        }

        public void run() {
            mTarget.run();
            synchronized (this) {
                mComplete = true;
                notifyAll();
            }
        }

        public void waitForComplete() {
            synchronized (this) {
                while (!mComplete) {
                    try {
                        wait();
                    } catch (InterruptedException e) {
                        // ignore
                    }
                }
            }
        }
    }
}