/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;


import android.app.Presentation;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.graphics.SurfaceTexture;
import android.graphics.Typeface;
import android.graphics.drawable.ColorDrawable;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.CodecProfileLevel;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.os.Parcel;
import android.support.test.filters.SmallTest;
import android.platform.test.annotations.RequiresDevice;
import android.test.AndroidTestCase;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Size;
import android.view.Display;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;

import android.media.cts.R;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Tests to check if MediaCodec encoding works with composition of multiple virtual displays.
 * The test also tries to destroy and create virtual displays repeatedly to
 * detect any issues. The test itself does not check the output as it is already done in other
 * tests.
 */
@SmallTest
@RequiresDevice
public class EncodeVirtualDisplayWithCompositionTest extends AndroidTestCase {
    private static final String TAG = "EncodeVirtualDisplayWithCompositionTest";
    private static final boolean DBG = true;
    private static final String MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;

    private static final long DEFAULT_WAIT_TIMEOUT_MS = 3000;
    private static final long DEFAULT_WAIT_TIMEOUT_US = 3000000;

    private static final int COLOR_RED = makeColor(100, 0, 0);
    private static final int COLOR_BLUE = makeColor(0, 0, 100);
    private static final int COLOR_GREEN = makeColor(0, 100, 0);
    private static final int COLOR_GREY = makeColor(100, 100, 100);

    private static final int BITRATE_1080p = 20000000;
    private static final int BITRATE_720p = 14000000;
    private static final int BITRATE_800x480 = 14000000;
    private static final int BITRATE_DEFAULT = 10000000;

    private static final int IFRAME_INTERVAL = 10;

    private static final int MAX_NUM_WINDOWS = 3;

    private static Handler sHandlerForRunOnMain = new Handler(Looper.getMainLooper());

    private Surface mEncodingSurface;
    private OutputSurface mDecodingSurface;
    private volatile boolean mCodecConfigReceived = false;
    private volatile boolean mCodecBufferReceived = false;
    private EncodingHelper mEncodingHelper;
    private MediaCodec mDecoder;
    private final ByteBuffer mPixelBuf = ByteBuffer.allocateDirect(4);
    private volatile boolean mIsQuitting = false;
    private Throwable mTestException;
    private VirtualDisplayPresentation mLocalPresentation;
    private RemoteVirtualDisplayPresentation mRemotePresentation;
    private ByteBuffer[] mDecoderInputBuffers;

    /** event listener for test without verifying output */
    private EncoderEventListener mEncoderEventListener = new EncoderEventListener() {
        @Override
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecConfigReceived = true;
        }
        @Override
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecBufferReceived = true;
        }
        @Override
        public void onError(String errorMessage) {
            fail(errorMessage);
        }
    };

    /* TEST_COLORS static initialization; need ARGB for ColorDrawable */
    private static int makeColor(int red, int green, int blue) {
        return 0xff << 24 | (red & 0xff) << 16 | (green & 0xff) << 8 | (blue & 0xff);
    }

    public void testVirtualDisplayRecycles() throws Exception {
        doTestVirtualDisplayRecycles(3);
    }

    public void testRendering800x480Locally() throws Throwable {
        Log.i(TAG, "testRendering800x480Locally");
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, false, false);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480Locally(): codec not supported");
        }
    }

    public void testRenderingMaxResolutionLocally() throws Throwable {
        Log.i(TAG, "testRenderingMaxResolutionLocally");
        Size maxRes = checkMaxConcurrentEncodingDecodingResolution();
        if (maxRes == null) {
            Log.i(TAG, "SKIPPING testRenderingMaxResolutionLocally(): codec not supported");
        } else {
            Log.w(TAG, "Trying resolution " + maxRes);
            runTestRenderingInSeparateThread(maxRes.getWidth(), maxRes.getHeight(), false, false);
        }
    }
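
    // Note on the "Locally" vs. "Remotely" variants: the local tests drive the Presentation
    // through a VirtualDisplayPresentation created in this process, while the remote tests bind
    // to android.media.cts.RemoteVirtualDisplayService and send the Surface and render commands
    // across binder (see RemoteVirtualDisplayPresentation). Both paths feed the same
    // GlCompositor -> encoder -> decoder -> color-check pipeline.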

    public void testRendering800x480Remotely() throws Throwable {
        Log.i(TAG, "testRendering800x480Remotely");
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, true, false);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480Remotely(): codec not supported");
        }
    }

    public void testRenderingMaxResolutionRemotely() throws Throwable {
        Log.i(TAG, "testRenderingMaxResolutionRemotely");
        Size maxRes = checkMaxConcurrentEncodingDecodingResolution();
        if (maxRes == null) {
            Log.i(TAG, "SKIPPING testRenderingMaxResolutionRemotely(): codec not supported");
        } else {
            Log.w(TAG, "Trying resolution " + maxRes);
            runTestRenderingInSeparateThread(maxRes.getWidth(), maxRes.getHeight(), true, false);
        }
    }

    public void testRendering800x480RemotelyWith3Windows() throws Throwable {
        Log.i(TAG, "testRendering800x480RemotelyWith3Windows");
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, true, true);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480RemotelyWith3Windows(): codec not supported");
        }
    }

    public void testRendering800x480LocallyWith3Windows() throws Throwable {
        Log.i(TAG, "testRendering800x480LocallyWith3Windows");
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, false, true);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480LocallyWith3Windows(): codec not supported");
        }
    }

    /**
     * Run rendering test in a separate thread. This is necessary as {@link OutputSurface} requires
     * constructing it in a non-test thread.
     * @param w output width
     * @param h output height
     * @param runRemotely true to run the Presentation in the remote service process
     * @param multipleWindows true to compose multiple windows instead of a single one
     * @throws Throwable
     */
    private void runTestRenderingInSeparateThread(final int w, final int h,
            final boolean runRemotely, final boolean multipleWindows) throws Throwable {
        mTestException = null;
        Thread renderingThread = new Thread(new Runnable() {
            public void run() {
                try {
                    doTestRenderingOutput(w, h, runRemotely, multipleWindows);
                } catch (Throwable t) {
                    t.printStackTrace();
                    mTestException = t;
                }
            }
        });
        renderingThread.start();
        renderingThread.join(60000);
        assertTrue(!renderingThread.isAlive());
        if (mTestException != null) {
            throw mTestException;
        }
    }

    private void doTestRenderingOutput(int w, int h, boolean runRemotely, boolean multipleWindows)
            throws Throwable {
        if (DBG) {
            Log.i(TAG, "doTestRenderingOutput for w:" + w + " h:" + h);
        }
        try {
            mIsQuitting = false;
            mDecoder = MediaCodec.createDecoderByType(MIME_TYPE);
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, w, h);
            mDecodingSurface = new OutputSurface(w, h);
            mDecoder.configure(decoderFormat, mDecodingSurface.getSurface(), null, 0);
            mDecoder.start();
            mDecoderInputBuffers = mDecoder.getInputBuffers();

            mEncodingHelper = new EncodingHelper();
            mEncodingSurface = mEncodingHelper.startEncoding(w, h,
                    new EncoderEventListener() {
                        @Override
                        public void onCodecConfig(ByteBuffer data, BufferInfo info) {
                            if (DBG) {
                                Log.i(TAG, "onCodecConfig l:" + info.size);
                            }
                            handleEncodedData(data, info);
                        }

                        @Override
                        public void onBufferReady(ByteBuffer data, BufferInfo info) {
                            if (DBG) {
                                Log.i(TAG, "onBufferReady l:" + info.size);
                            }
                            handleEncodedData(data, info);
                        }

                        @Override
                        public void onError(String errorMessage) {
                            fail(errorMessage);
                        }

                        private void handleEncodedData(ByteBuffer data, BufferInfo info) {
                            if (mIsQuitting) {
                                if (DBG) {
                                    Log.i(TAG, "ignore data as test is quitting");
                                }
                                return;
                            }
                            int inputBufferIndex =
                                    mDecoder.dequeueInputBuffer(DEFAULT_WAIT_TIMEOUT_US);
                            if (inputBufferIndex < 0) {
                                if (DBG) {
                                    Log.i(TAG, "dequeueInputBuffer returned:" + inputBufferIndex);
                                }
                                return;
                            }
                            assertTrue(inputBufferIndex >= 0);
                            ByteBuffer inputBuffer = mDecoderInputBuffers[inputBufferIndex];
                            inputBuffer.clear();
                            inputBuffer.put(data);
                            mDecoder.queueInputBuffer(inputBufferIndex, 0, info.size,
                                    info.presentationTimeUs, info.flags);
                        }
                    });
            GlCompositor compositor = new GlCompositor();
            if (DBG) {
                Log.i(TAG, "start composition");
            }
            compositor.startComposition(mEncodingSurface, w, h, multipleWindows ? 3 : 1);

            if (DBG) {
                Log.i(TAG, "create display");
            }

            Renderer renderer = null;
            if (runRemotely) {
                mRemotePresentation = new RemoteVirtualDisplayPresentation(getContext(),
                        compositor.getWindowSurface(multipleWindows ? 1 : 0), w, h);
                mRemotePresentation.connect();
                mRemotePresentation.start();
                renderer = mRemotePresentation;
            } else {
                mLocalPresentation = new VirtualDisplayPresentation(getContext(),
                        compositor.getWindowSurface(multipleWindows ? 1 : 0), w, h);
                mLocalPresentation.createVirtualDisplay();
                mLocalPresentation.createPresentation();
                renderer = mLocalPresentation;
            }

            if (DBG) {
                Log.i(TAG, "start rendering and check");
            }
            renderColorAndCheckResult(renderer, w, h, COLOR_RED);
            renderColorAndCheckResult(renderer, w, h, COLOR_BLUE);
            renderColorAndCheckResult(renderer, w, h, COLOR_GREEN);
            renderColorAndCheckResult(renderer, w, h, COLOR_GREY);

            mIsQuitting = true;
            if (runRemotely) {
                mRemotePresentation.disconnect();
            } else {
                mLocalPresentation.dismissPresentation();
                mLocalPresentation.destroyVirtualDisplay();
            }

            compositor.stopComposition();
        } finally {
            if (mEncodingHelper != null) {
                mEncodingHelper.stopEncoding();
                mEncodingHelper = null;
            }
            if (mDecoder != null) {
                mDecoder.stop();
                mDecoder.release();
                mDecoder = null;
            }
            if (mDecodingSurface != null) {
                mDecodingSurface.release();
                mDecodingSurface = null;
            }
        }
    }

    private static final int NUM_MAX_RETRY = 120;
    private static final int IMAGE_WAIT_TIMEOUT_MS = 1000;

    private void renderColorAndCheckResult(Renderer renderer, int w, int h,
            int color) throws Exception {
        BufferInfo info = new BufferInfo();
        for (int i = 0; i < NUM_MAX_RETRY; i++) {
            renderer.doRendering(color);
            int bufferIndex = mDecoder.dequeueOutputBuffer(info, DEFAULT_WAIT_TIMEOUT_US);
            if (DBG) {
                Log.i(TAG, "decoder dequeueOutputBuffer returned " + bufferIndex);
            }
            if (bufferIndex < 0) {
                continue;
            }
            mDecoder.releaseOutputBuffer(bufferIndex, true);
            if (mDecodingSurface.checkForNewImage(IMAGE_WAIT_TIMEOUT_MS)) {
                mDecodingSurface.drawImage();
                if (checkSurfaceFrameColor(w, h, color)) {
                    Log.i(TAG, "color " + Integer.toHexString(color) + " matched");
                    return;
                }
            } else if (DBG) {
                Log.i(TAG, "no rendering yet");
            }
        }
        fail("Color did not match");
    }

    private boolean checkSurfaceFrameColor(int w, int h, int color) {
        // Read a pixel from the center of the surface. Might want to read from multiple points
        // and average them together.
        int x = w / 2;
        int y = h / 2;
        GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
        int r = mPixelBuf.get(0) & 0xff;
        int g = mPixelBuf.get(1) & 0xff;
        int b = mPixelBuf.get(2) & 0xff;

        int redExpected = (color >> 16) & 0xff;
        int greenExpected = (color >> 8) & 0xff;
        int blueExpected = color & 0xff;
        if (approxEquals(redExpected, r) && approxEquals(greenExpected, g)
                && approxEquals(blueExpected, b)) {
            return true;
        }
        Log.i(TAG, "expected 0x" + Integer.toHexString(color) + " got 0x"
                + Integer.toHexString(makeColor(r, g, b)));
        return false;
    }
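
    // Worked example of the check above: COLOR_GREY is makeColor(100, 100, 100) == 0xff646464,
    // so redExpected, greenExpected and blueExpected are all 100 (0x64). Each decoded channel
    // only has to land within MAX_DELTA of the expected value, which tolerates codec
    // quantization loss.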

    /**
     * Determines if two color values are approximately equal.
     */
    private static boolean approxEquals(int expected, int actual) {
        final int MAX_DELTA = 4;
        return Math.abs(expected - actual) <= MAX_DELTA;
    }

    private static final int NUM_CODEC_CREATION = 5;
    private static final int NUM_DISPLAY_CREATION = 10;
    private static final int NUM_RENDERING = 10;

    private void doTestVirtualDisplayRecycles(int numDisplays) throws Exception {
        Size maxSize = getMaxSupportedEncoderSize();
        if (maxSize == null) {
            Log.i(TAG, "no codec found, skipping");
            return;
        }
        VirtualDisplayPresentation[] virtualDisplays = new VirtualDisplayPresentation[numDisplays];
        for (int i = 0; i < NUM_CODEC_CREATION; i++) {
            mCodecConfigReceived = false;
            mCodecBufferReceived = false;
            if (DBG) {
                Log.i(TAG, "start encoding");
            }
            EncodingHelper encodingHelper = new EncodingHelper();
            try {
                mEncodingSurface = encodingHelper.startEncoding(
                        maxSize.getWidth(), maxSize.getHeight(), mEncoderEventListener);
                GlCompositor compositor = new GlCompositor();
                if (DBG) {
                    Log.i(TAG, "start composition");
                }
                compositor.startComposition(mEncodingSurface,
                        maxSize.getWidth(), maxSize.getHeight(), numDisplays);
                for (int j = 0; j < NUM_DISPLAY_CREATION; j++) {
                    if (DBG) {
                        Log.i(TAG, "create display");
                    }
                    for (int k = 0; k < numDisplays; k++) {
                        virtualDisplays[k] =
                                new VirtualDisplayPresentation(getContext(),
                                        compositor.getWindowSurface(k),
                                        maxSize.getWidth() / numDisplays, maxSize.getHeight());
                        virtualDisplays[k].createVirtualDisplay();
                        virtualDisplays[k].createPresentation();
                    }
                    if (DBG) {
                        Log.i(TAG, "start rendering");
                    }
                    for (int k = 0; k < NUM_RENDERING; k++) {
                        for (int l = 0; l < numDisplays; l++) {
                            virtualDisplays[l].doRendering(COLOR_RED);
                        }
                        // do not care how many frames are actually rendered.
                        Thread.sleep(1);
                    }
                    for (int k = 0; k < numDisplays; k++) {
                        virtualDisplays[k].dismissPresentation();
                        virtualDisplays[k].destroyVirtualDisplay();
                    }
                    compositor.recreateWindows();
                }
                if (DBG) {
                    Log.i(TAG, "stop composition");
                }
                compositor.stopComposition();
            } finally {
                if (DBG) {
                    Log.i(TAG, "stop encoding");
                }
                encodingHelper.stopEncoding();
                assertTrue(mCodecConfigReceived);
                assertTrue(mCodecBufferReceived);
            }
        }
    }

    interface EncoderEventListener {
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info);
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info);
        public void onError(String errorMessage);
    }

    private class EncodingHelper {
        private MediaCodec mEncoder;
        private volatile boolean mStopEncoding = false;
        private EncoderEventListener mEventListener;
        private int mW;
        private int mH;
        private Thread mEncodingThread;
        private Surface mEncodingSurface;
        private Semaphore mInitCompleted = new Semaphore(0);

        Surface startEncoding(int w, int h, EncoderEventListener eventListener) {
            mStopEncoding = false;
            mW = w;
            mH = h;
            mEventListener = eventListener;
            mEncodingThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        doEncoding();
                    } catch (Exception e) {
                        e.printStackTrace();
                        mEventListener.onError(e.toString());
                    }
                }
            });
            mEncodingThread.start();
            try {
                if (DBG) {
                    Log.i(TAG, "wait for encoder init");
                }
                mInitCompleted.acquire();
                if (DBG) {
                    Log.i(TAG, "wait for encoder done");
                }
            } catch (InterruptedException e) {
                fail("should not happen");
            }
            return mEncodingSurface;
        }

        void stopEncoding() {
            try {
                mStopEncoding = true;
                mEncodingThread.join();
            } catch (InterruptedException e) {
                // just ignore
            } finally {
                mEncodingThread = null;
            }
        }

        private void doEncoding() throws Exception {
            final int TIMEOUT_USEC_NORMAL = 1000000;
            MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mW, mH);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            int bitRate = BITRATE_DEFAULT;
            if (mW == 1920 && mH == 1080) {
                bitRate = BITRATE_1080p;
            } else if (mW == 1280 && mH == 720) {
                bitRate = BITRATE_720p;
            } else if (mW == 800 && mH == 480) {
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

            MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
            String codecName = null;
            if ((codecName = mcl.findEncoderForFormat(format)) == null) {
                throw new RuntimeException(
                        "no " + MIME_TYPE + " encoder supports format: " + format.toString());
            }

            try {
                mEncoder = MediaCodec.createByCodecName(codecName);
                mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
                mEncodingSurface = mEncoder.createInputSurface();
                mEncoder.start();
                mInitCompleted.release();
                if (DBG) {
                    Log.i(TAG, "starting encoder");
                }
                ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                while (!mStopEncoding) {
                    int index = mEncoder.dequeueOutputBuffer(info, TIMEOUT_USEC_NORMAL);
                    if (DBG) {
                        Log.i(TAG, "encoder dequeueOutputBuffer returned " + index);
                    }
                    if (index >= 0) {
                        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            Log.i(TAG, "codec config data");
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onCodecConfig(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        } else if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            Log.i(TAG, "EOS, stopping encoding");
                            break;
                        } else {
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onBufferReady(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        }
                    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        Log.i(TAG, "output buffer changed");
                        encoderOutputBuffers = mEncoder.getOutputBuffers();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                throw e;
            } finally {
                if (mEncoder != null) {
                    mEncoder.stop();
                    mEncoder.release();
                    mEncoder = null;
                }
                if (mEncodingSurface != null) {
                    mEncodingSurface.release();
                    mEncodingSurface = null;
                }
            }
        }
    }

    /**
     * Handles composition of multiple SurfaceTextures into a single Surface.
     */
    private class GlCompositor implements SurfaceTexture.OnFrameAvailableListener {
        private Surface mSurface;
        private int mWidth;
        private int mHeight;
        private volatile int mNumWindows;
        private GlWindow mTopWindow;
        private Thread mCompositionThread;
        private Semaphore mStartCompletionSemaphore;
        private Semaphore mRecreationCompletionSemaphore;
        private Looper mLooper;
        private Handler mHandler;
        private InputSurface mEglHelper;
        private int mGlProgramId = 0;
        private int mGluMVPMatrixHandle;
        private int mGluSTMatrixHandle;
        private int mGlaPositionHandle;
        private int mGlaTextureHandle;
        private float[] mMVPMatrix = new float[16];
        private TopWindowVirtualDisplayPresentation mTopPresentation;

        private static final String VERTEX_SHADER =
                "uniform mat4 uMVPMatrix;\n" +
                "uniform mat4 uSTMatrix;\n" +
                "attribute vec4 aPosition;\n" +
                "attribute vec4 aTextureCoord;\n" +
                "varying vec2 vTextureCoord;\n" +
                "void main() {\n" +
                "  gl_Position = uMVPMatrix * aPosition;\n" +
                "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                "}\n";

        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "varying vec2 vTextureCoord;\n" +
                "uniform samplerExternalOES sTexture;\n" +
                "void main() {\n" +
                "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                "}\n";

        void startComposition(Surface surface, int w, int h, int numWindows) throws Exception {
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mNumWindows = numWindows;
            mCompositionThread = new Thread(new CompositionRunnable());
            mStartCompletionSemaphore = new Semaphore(0);
            mCompositionThread.start();
            waitForStartCompletion();
        }
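
        // Threading note: all GL work happens on mCompositionThread, which runs its own Looper
        // (see CompositionRunnable). onFrameAvailable() and recreateWindows() only post
        // DO_RENDERING / DO_RECREATE_WINDOWS messages to CompositionHandler, so EGL state is
        // touched from a single thread. startComposition() blocks until initGl() has released
        // mStartCompletionSemaphore.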

        void stopComposition() {
            try {
                if (mLooper != null) {
                    mLooper.quit();
                    mCompositionThread.join();
                }
            } catch (InterruptedException e) {
                // don't care
            }
            mCompositionThread = null;
            mSurface = null;
            mStartCompletionSemaphore = null;
        }

        Surface getWindowSurface(int windowIndex) {
            return mTopPresentation.getSurface(windowIndex);
        }

        void recreateWindows() throws Exception {
            mRecreationCompletionSemaphore = new Semaphore(0);
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RECREATE_WINDOWS);
            mHandler.sendMessage(msg);
            if (!mRecreationCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("recreation timeout");
            }
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        @Override
        public void onFrameAvailable(SurfaceTexture surface) {
            if (DBG) {
                Log.i(TAG, "onFrameAvailable " + surface);
            }
            GlWindow w = mTopWindow;
            if (w != null) {
                w.markTextureUpdated();
                requestUpdate();
            } else {
                Log.w(TAG, "top window gone");
            }
        }

        private void requestUpdate() {
            Thread compositionThread = mCompositionThread;
            if (compositionThread == null || !compositionThread.isAlive()) {
                return;
            }
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RENDERING);
            mHandler.sendMessage(msg);
        }

        private int loadShader(int shaderType, String source) throws GlException {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
        }

        private int createProgram(String vertexSource, String fragmentSource) throws GlException {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }

            int program = GLES20.glCreateProgram();
            checkGlError("glCreateProgram");
            if (program == 0) {
                Log.e(TAG, "Could not create program");
            }
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
            return program;
        }
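
        // The projection set up in initGl() is an orthographic one sized to the output surface,
        // with the eye centered at (mWidth / 2, mHeight / 2) looking toward -z. This lets
        // GlWindow specify its quad vertices directly in pixel coordinates (0,0 .. mWidth,mHeight).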

        private void initGl() throws GlException {
            mEglHelper = new InputSurface(mSurface);
            mEglHelper.makeCurrent();
            mGlProgramId = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            mGlaPositionHandle = GLES20.glGetAttribLocation(mGlProgramId, "aPosition");
            checkGlError("glGetAttribLocation aPosition");
            if (mGlaPositionHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aPosition");
            }
            mGlaTextureHandle = GLES20.glGetAttribLocation(mGlProgramId, "aTextureCoord");
            checkGlError("glGetAttribLocation aTextureCoord");
            if (mGlaTextureHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aTextureCoord");
            }
            mGluMVPMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uMVPMatrix");
            checkGlError("glGetUniformLocation uMVPMatrix");
            if (mGluMVPMatrixHandle == -1) {
                throw new RuntimeException("Could not get uniform location for uMVPMatrix");
            }
            mGluSTMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uSTMatrix");
            checkGlError("glGetUniformLocation uSTMatrix");
            if (mGluSTMatrixHandle == -1) {
                throw new RuntimeException("Could not get uniform location for uSTMatrix");
            }
            Matrix.setIdentityM(mMVPMatrix, 0);
            Log.i(TAG, "initGl w:" + mWidth + " h:" + mHeight);
            GLES20.glViewport(0, 0, mWidth, mHeight);
            float[] vMatrix = new float[16];
            float[] projMatrix = new float[16];
            // max window is from (0,0) to (mWidth - 1, mHeight - 1)
            float wMid = mWidth / 2f;
            float hMid = mHeight / 2f;
            // look from positive z to hide windows in lower z
            Matrix.setLookAtM(vMatrix, 0, wMid, hMid, 5f, wMid, hMid, 0f, 0f, 1.0f, 0.0f);
            Matrix.orthoM(projMatrix, 0, -wMid, wMid, -hMid, hMid, 1, 10);
            Matrix.multiplyMM(mMVPMatrix, 0, projMatrix, 0, vMatrix, 0);
            createWindows();
        }

        private void createWindows() throws GlException {
            mTopWindow = new GlWindow(this, 0, 0, mWidth, mHeight);
            mTopWindow.init();
            mTopPresentation = new TopWindowVirtualDisplayPresentation(mContext,
                    mTopWindow.getSurface(), mWidth, mHeight, mNumWindows);
            mTopPresentation.createVirtualDisplay();
            mTopPresentation.createPresentation();
            ((TopWindowPresentation) mTopPresentation.getPresentation()).populateWindows();
        }

        private void cleanupGl() {
            if (mTopPresentation != null) {
                mTopPresentation.dismissPresentation();
                mTopPresentation.destroyVirtualDisplay();
                mTopPresentation = null;
            }
            if (mTopWindow != null) {
                mTopWindow.cleanup();
                mTopWindow = null;
            }
            if (mEglHelper != null) {
                mEglHelper.release();
                mEglHelper = null;
            }
        }

        private void doGlRendering() throws GlException {
            if (DBG) {
                Log.i(TAG, "doGlRendering");
            }
            mTopWindow.updateTexImageIfNecessary();
            GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

            GLES20.glUseProgram(mGlProgramId);
            GLES20.glUniformMatrix4fv(mGluMVPMatrixHandle, 1, false, mMVPMatrix, 0);
            mTopWindow.onDraw(mGluSTMatrixHandle, mGlaPositionHandle, mGlaTextureHandle);
            checkGlError("window draw");
            if (DBG) {
                final IntBuffer pixels = IntBuffer.allocate(1);
                GLES20.glReadPixels(mWidth / 2, mHeight / 2, 1, 1,
                        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
                Log.i(TAG, "glReadPixels returned 0x" + Integer.toHexString(pixels.get(0)));
            }
            mEglHelper.swapBuffers();
        }

        private void doRecreateWindows() throws GlException {
            mTopPresentation.dismissPresentation();
            mTopPresentation.destroyVirtualDisplay();
            mTopWindow.cleanup();
            createWindows();
            mRecreationCompletionSemaphore.release();
        }

        private void waitForStartCompletion() throws Exception {
            if (!mStartCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("start timeout");
            }
            mStartCompletionSemaphore = null;
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        private class CompositionRunnable implements Runnable {
            @Override
            public void run() {
                try {
                    Looper.prepare();
                    mLooper = Looper.myLooper();
                    mHandler = new CompositionHandler();
                    initGl();
                    // init done
                    mStartCompletionSemaphore.release();
                    Looper.loop();
                } catch (GlException e) {
                    e.printStackTrace();
                    fail("got gl exception");
                } finally {
                    cleanupGl();
                    mHandler = null;
                    mLooper = null;
                }
            }
        }

        private class CompositionHandler extends Handler {
            private static final int DO_RENDERING = 1;
            private static final int DO_RECREATE_WINDOWS = 2;

            @Override
            public void handleMessage(Message msg) {
                try {
                    switch (msg.what) {
                        case DO_RENDERING: {
                            doGlRendering();
                        } break;
                        case DO_RECREATE_WINDOWS: {
                            doRecreateWindows();
                        } break;
                    }
                } catch (GlException e) {
                    // ignore as this can happen during teardown
                }
            }
        }

        private class GlWindow {
            private static final int FLOAT_SIZE_BYTES = 4;
            private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
            private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
            private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
            private int mBlX;
            private int mBlY;
            private int mWidth;
            private int mHeight;
            private int mTextureId = 0; // 0 is invalid
            private volatile SurfaceTexture mSurfaceTexture;
            private volatile Surface mSurface;
            private FloatBuffer mVerticesData;
            private float[] mSTMatrix = new float[16];
            private AtomicInteger mNumTextureUpdated = new AtomicInteger(0);
            private GlCompositor mCompositor;

            /**
             * @param blX X coordinate of bottom-left point of window
             * @param blY Y coordinate of bottom-left point of window
             * @param w window width
             * @param h window height
             */
            public GlWindow(GlCompositor compositor, int blX, int blY, int w, int h) {
                mCompositor = compositor;
                mBlX = blX;
                mBlY = blY;
                mWidth = w;
                mHeight = h;
                int trX = blX + w;
                int trY = blY + h;
                float[] vertices = new float[] {
                        // x, y, z, u, v
                        mBlX, mBlY, 0, 0, 0,
                        trX, mBlY, 0, 1, 0,
                        mBlX, trY, 0, 0, 1,
                        trX, trY, 0, 1, 1
                };
                Log.i(TAG, "create window " + this + " blX:" + mBlX + " blY:" + mBlY + " trX:" +
                        trX + " trY:" + trY);
                mVerticesData = ByteBuffer.allocateDirect(
                        vertices.length * FLOAT_SIZE_BYTES)
                        .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mVerticesData.put(vertices).position(0);
            }

            /**
             * Initialize the window for composition. The counterpart is cleanup().
             * @throws GlException
             */
            public void init() throws GlException {
                int[] textures = new int[1];
                GLES20.glGenTextures(1, textures, 0);

                mTextureId = textures[0];
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                checkGlError("glBindTexture mTextureID");

                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                        GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                        GLES20.GL_CLAMP_TO_EDGE);
                checkGlError("glTexParameter");
                mSurfaceTexture = new SurfaceTexture(mTextureId);
                mSurfaceTexture.setDefaultBufferSize(mWidth, mHeight);
                mSurface = new Surface(mSurfaceTexture);
                mSurfaceTexture.setOnFrameAvailableListener(mCompositor);
            }

            public void cleanup() {
                mNumTextureUpdated.set(0);
                if (mTextureId != 0) {
                    int[] textures = new int[] {
                            mTextureId
                    };
                    GLES20.glDeleteTextures(1, textures, 0);
                }
                GLES20.glFinish();
                if (mSurface != null) {
                    mSurface.release();
                    mSurface = null;
                }
                if (mSurfaceTexture != null) {
                    mSurfaceTexture.release();
                    mSurfaceTexture = null;
                }
            }
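
            // mNumTextureUpdated is a small cross-thread handshake: onFrameAvailable() (called on
            // an arbitrary thread) increments it via markTextureUpdated(), and the composition
            // thread decrements it in updateTexImageIfNecessary(), so updateTexImage() is only
            // called when a new frame has actually been queued to the SurfaceTexture.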

            /**
             * Mark the texture as updated so that it will be refreshed in the next rendering.
             */
            public void markTextureUpdated() {
                mNumTextureUpdated.incrementAndGet();
            }

            /**
             * Update the texture for rendering if a new frame has arrived.
             */
            public void updateTexImageIfNecessary() {
                int numTextureUpdated = mNumTextureUpdated.getAndDecrement();
                if (numTextureUpdated > 0) {
                    if (DBG) {
                        Log.i(TAG, "updateTexImageIfNecessary " + this);
                    }
                    mSurfaceTexture.updateTexImage();
                    mSurfaceTexture.getTransformMatrix(mSTMatrix);
                }
                if (numTextureUpdated < 0) {
                    fail("should not happen");
                }
            }

            /**
             * Draw the window. It will not be drawn at all if the window is not visible.
             * @param uSTMatrixHandle shader handle for the STMatrix for texture coordinates
             *        mapping
             * @param aPositionHandle shader handle for vertex position
             * @param aTextureHandle shader handle for texture coordinates
             */
            public void onDraw(int uSTMatrixHandle, int aPositionHandle, int aTextureHandle) {
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                mVerticesData.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
                GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aPositionHandle);

                mVerticesData.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
                GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aTextureHandle);
                GLES20.glUniformMatrix4fv(uSTMatrixHandle, 1, false, mSTMatrix, 0);
                GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            }

            public SurfaceTexture getSurfaceTexture() {
                return mSurfaceTexture;
            }

            public Surface getSurface() {
                return mSurface;
            }
        }
    }

    static void checkGlError(String op) throws GlException {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new GlException(op + ": glError " + error);
        }
    }

    public static class GlException extends Exception {
        public GlException(String msg) {
            super(msg);
        }
    }

    private interface Renderer {
        void doRendering(final int color) throws Exception;
    }

    private static class VirtualDisplayPresentation implements Renderer {
        protected final Context mContext;
        protected final Surface mSurface;
        protected final int mWidth;
        protected final int mHeight;
        protected VirtualDisplay mVirtualDisplay;
        protected TestPresentationBase mPresentation;
        private final DisplayManager mDisplayManager;

        VirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mDisplayManager = (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE);
        }
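
        // createVirtualDisplay() below uses VIRTUAL_DISPLAY_FLAG_PRESENTATION so that a
        // Presentation can be shown on the display, and VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY so
        // that it never mirrors the default display; 200 is simply the dpi chosen for the test.
        // All display and presentation operations are funneled through runOnMainSync() so they
        // execute on the main (UI) thread.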

        void createVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay = mDisplayManager.createVirtualDisplay(
                            TAG, mWidth, mHeight, 200, mSurface,
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY |
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION);
                }
            });
        }

        void destroyVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay.release();
                }
            });
        }

        void createPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation = doCreatePresentation();
                    mPresentation.show();
                }
            });
        }

        protected TestPresentationBase doCreatePresentation() {
            return new TestPresentation(mContext, mVirtualDisplay.getDisplay());
        }

        TestPresentationBase getPresentation() {
            return mPresentation;
        }

        void dismissPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.dismiss();
                }
            });
        }

        @Override
        public void doRendering(final int color) throws Exception {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.doRendering(color);
                }
            });
        }
    }

    private static class TestPresentationBase extends Presentation {

        public TestPresentationBase(Context outerContext, Display display) {
            // This theme is required to prevent an extra view from obscuring the presentation
            super(outerContext, display,
                    android.R.style.Theme_Holo_Light_NoActionBar_TranslucentDecor);
            getWindow().setType(WindowManager.LayoutParams.TYPE_PRIVATE_PRESENTATION);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_LOCAL_FOCUS_MODE);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
        }

        public void doRendering(int color) {
            // to be implemented by child
        }
    }

    private static class TestPresentation extends TestPresentationBase {
        private ImageView mImageView;

        public TestPresentation(Context outerContext, Display display) {
            super(outerContext, display);
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            mImageView = new ImageView(getContext());
            mImageView.setImageDrawable(new ColorDrawable(COLOR_RED));
            mImageView.setLayoutParams(new LayoutParams(
                    LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
            setContentView(mImageView);
        }

        public void doRendering(int color) {
            if (DBG) {
                Log.i(TAG, "doRendering " + Integer.toHexString(color));
            }
            mImageView.setImageDrawable(new ColorDrawable(color));
        }
    }

    private static class TopWindowPresentation extends TestPresentationBase {
        private FrameLayout[] mWindowsLayout = new FrameLayout[MAX_NUM_WINDOWS];
        private CompositionTextureView[] mWindows = new CompositionTextureView[MAX_NUM_WINDOWS];
        private final int mNumWindows;
        private final Semaphore mWindowWaitSemaphore = new Semaphore(0);

        public TopWindowPresentation(int numWindows, Context outerContext, Display display) {
            super(outerContext, display);
            mNumWindows = numWindows;
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            if (DBG) {
                Log.i(TAG, "TopWindowPresentation onCreate, numWindows " + mNumWindows);
            }
            setContentView(R.layout.composition_layout);
            mWindowsLayout[0] = (FrameLayout) findViewById(R.id.window0);
            mWindowsLayout[1] = (FrameLayout) findViewById(R.id.window1);
            mWindowsLayout[2] = (FrameLayout) findViewById(R.id.window2);
        }

        public void populateWindows() {
            runOnMain(new Runnable() {
                public void run() {
                    for (int i = 0; i < mNumWindows; i++) {
                        mWindows[i] = new CompositionTextureView(getContext());
                        mWindows[i].setLayoutParams(new ViewGroup.LayoutParams(
                                ViewGroup.LayoutParams.MATCH_PARENT,
                                ViewGroup.LayoutParams.MATCH_PARENT));
                        mWindowsLayout[i].setVisibility(View.VISIBLE);
                        mWindowsLayout[i].addView(mWindows[i]);
                        mWindows[i].startListening();
                    }
                    mWindowWaitSemaphore.release();
                }
            });
        }

        public void waitForSurfaceReady(long timeoutMs) throws Exception {
            mWindowWaitSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
            for (int i = 0; i < mNumWindows; i++) {
                if (!mWindows[i].waitForSurfaceReady(timeoutMs)) {
                    fail("surface wait timeout");
                }
            }
        }

        public Surface getSurface(int windowIndex) {
            Surface surface = mWindows[windowIndex].getSurface();
            assertNotNull(surface);
            return surface;
        }
    }

    private static class TopWindowVirtualDisplayPresentation extends VirtualDisplayPresentation {
        private final int mNumWindows;

        TopWindowVirtualDisplayPresentation(Context context, Surface surface, int w, int h,
                int numWindows) {
            super(context, surface, w, h);
            assertNotNull(surface);
            mNumWindows = numWindows;
        }

        void waitForSurfaceReady(long timeoutMs) throws Exception {
            ((TopWindowPresentation) mPresentation).waitForSurfaceReady(timeoutMs);
        }

        Surface getSurface(int windowIndex) {
            return ((TopWindowPresentation) mPresentation).getSurface(windowIndex);
        }

        @Override
        protected TestPresentationBase doCreatePresentation() {
            return new TopWindowPresentation(mNumWindows, mContext, mVirtualDisplay.getDisplay());
        }
    }

    private static class RemoteVirtualDisplayPresentation implements Renderer {
        /** argument: Surface, int w, int h, return none */
        private static final int BINDER_CMD_START = IBinder.FIRST_CALL_TRANSACTION;
        /** argument: int color, return none */
        private static final int BINDER_CMD_RENDER = IBinder.FIRST_CALL_TRANSACTION + 1;

        private final Context mContext;
        private final Surface mSurface;
        private final int mWidth;
        private final int mHeight;

        private IBinder mService;
        private final Semaphore mConnectionWait = new Semaphore(0);
        private final ServiceConnection mConnection = new ServiceConnection() {

            public void onServiceConnected(ComponentName arg0, IBinder arg1) {
                mService = arg1;
                mConnectionWait.release();
            }

            public void onServiceDisconnected(ComponentName arg0) {
                // ignore
            }
        };

        RemoteVirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
        }

        void connect() throws Exception {
            Intent intent = new Intent();
            intent.setClassName("android.media.cts",
                    "android.media.cts.RemoteVirtualDisplayService");
            mContext.bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
            if (!mConnectionWait.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
                fail("cannot bind to service");
            }
        }

        void disconnect() {
            mContext.unbindService(mConnection);
        }
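
        // The remote service speaks a tiny raw-binder protocol rather than AIDL: BINDER_CMD_START
        // carries the Surface plus the width and height, and BINDER_CMD_RENDER carries a single
        // color int, exactly as written out in start() and doRendering() below.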

        void start() throws Exception {
            Parcel parcel = Parcel.obtain();
            mSurface.writeToParcel(parcel, 0);
            parcel.writeInt(mWidth);
            parcel.writeInt(mHeight);
            mService.transact(BINDER_CMD_START, parcel, null, 0);
        }

        @Override
        public void doRendering(int color) throws Exception {
            Parcel parcel = Parcel.obtain();
            parcel.writeInt(color);
            mService.transact(BINDER_CMD_RENDER, parcel, null, 0);
        }
    }

    private static Size getMaxSupportedEncoderSize() {
        final Size[] standardSizes = new Size[] {
            new Size(1920, 1080),
            new Size(1280, 720),
            new Size(720, 480),
            new Size(352, 576)
        };

        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (Size sz : standardSizes) {
            MediaFormat format = MediaFormat.createVideoFormat(
                    MIME_TYPE, sz.getWidth(), sz.getHeight());
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            int bitRate = BITRATE_DEFAULT;
            if (sz.getWidth() == 1920 && sz.getHeight() == 1080) {
                bitRate = BITRATE_1080p;
            } else if (sz.getWidth() == 1280 && sz.getHeight() == 720) {
                bitRate = BITRATE_720p;
            } else if (sz.getWidth() == 800 && sz.getHeight() == 480) {
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            Log.i(TAG, "format = " + format.toString());
            if (mcl.findEncoderForFormat(format) != null) {
                return sz;
            }
        }
        return null;
    }

    /**
     * Check maximum concurrent encoding / decoding resolution allowed.
     * Some H/Ws cannot support maximum resolution reported in encoder if decoder is running
     * at the same time.
     * Check is done for 4 different levels: 1080p, 720p, 800x480, 480p
     * (The last one is required by CDD.)
     */
    private Size checkMaxConcurrentEncodingDecodingResolution() {
        if (isConcurrentEncodingDecodingSupported(1920, 1080, BITRATE_1080p)) {
            return new Size(1920, 1080);
        } else if (isConcurrentEncodingDecodingSupported(1280, 720, BITRATE_720p)) {
            return new Size(1280, 720);
        } else if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            return new Size(800, 480);
        } else if (isConcurrentEncodingDecodingSupported(720, 480, BITRATE_DEFAULT)) {
            return new Size(720, 480);
        }
        Log.i(TAG, "SKIPPING test: concurrent encoding and decoding is not supported");
        return null;
    }

    private boolean isConcurrentEncodingDecodingSupported(int w, int h, int bitRate) {
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        MediaFormat testFormat = MediaFormat.createVideoFormat(MIME_TYPE, w, h);
        testFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        testFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
        if (mcl.findDecoderForFormat(testFormat) == null
                || mcl.findEncoderForFormat(testFormat) == null) {
            return false;
        }

        MediaCodec decoder = null;
        OutputSurface decodingSurface = null;
        MediaCodec encoder = null;
        Surface encodingSurface = null;
        try {
            decoder = MediaCodec.createDecoderByType(MIME_TYPE);
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, w, h);
            decodingSurface = new OutputSurface(w, h);
            decodingSurface.makeCurrent();
            decoder.configure(decoderFormat, decodingSurface.getSurface(), null, 0);
            decoder.start();

            MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, w, h);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            encoder = MediaCodec.createEncoderByType(MIME_TYPE);
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encodingSurface = encoder.createInputSurface();
            encoder.start();

            encoder.stop();
            decoder.stop();
        } catch (Exception e) {
            e.printStackTrace();
            Log.i(TAG, "This H/W does not support w:" + w + " h:" + h);
            return false;
        } finally {
            if (encodingSurface != null) {
                encodingSurface.release();
            }
            if (encoder != null) {
                encoder.release();
            }
            if (decoder != null) {
                decoder.release();
            }
            if (decodingSurface != null) {
                decodingSurface.release();
            }
        }
        return true;
    }

    private static void runOnMain(Runnable runner) {
        sHandlerForRunOnMain.post(runner);
    }

    private static void runOnMainSync(Runnable runner) {
        SyncRunnable sr = new SyncRunnable(runner);
        sHandlerForRunOnMain.post(sr);
        sr.waitForComplete();
    }

    private static final class SyncRunnable implements Runnable {
        private final Runnable mTarget;
        private boolean mComplete;

        public SyncRunnable(Runnable target) {
            mTarget = target;
        }

        public void run() {
            mTarget.run();
            synchronized (this) {
                mComplete = true;
                notifyAll();
            }
        }

        public void waitForComplete() {
            synchronized (this) {
                while (!mComplete) {
                    try {
                        wait();
                    } catch (InterruptedException e) {
                        // ignore
                    }
                }
            }
        }
    }
}