/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.cts;

import android.app.Presentation;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.graphics.SurfaceTexture;
import android.graphics.Typeface;
import android.graphics.drawable.ColorDrawable;
import android.hardware.display.DisplayManager;
import android.hardware.display.VirtualDisplay;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaCodecInfo;
import android.media.MediaCodecInfo.CodecCapabilities;
import android.media.MediaCodecInfo.CodecProfileLevel;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.os.Bundle;
import android.os.Handler;
import android.os.IBinder;
import android.os.Looper;
import android.os.Message;
import android.os.Parcel;
import android.test.AndroidTestCase;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Size;
import android.view.Display;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewGroup.LayoutParams;
import android.view.WindowManager;
import android.widget.FrameLayout;
import android.widget.ImageView;
import android.widget.TextView;

import com.android.cts.media.R;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.nio.IntBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * Tests to check if MediaCodec encoding works with composition of multiple virtual displays.
 * The test also tries to destroy and create virtual displays repeatedly to
 * detect any issues. The test itself does not check the output as it is already done in other
 * tests.
 */
public class EncodeVirtualDisplayWithCompositionTest extends AndroidTestCase {
    private static final String TAG = "EncodeVirtualDisplayWithCompositionTest";
    private static final boolean DBG = true;
    private static final String MIME_TYPE = MediaFormat.MIMETYPE_VIDEO_AVC;

    private static final long DEFAULT_WAIT_TIMEOUT_MS = 3000;
    private static final long DEFAULT_WAIT_TIMEOUT_US = 3000000;

    private static final int COLOR_RED = makeColor(100, 0, 0);
    private static final int COLOR_BLUE = makeColor(0, 0, 100);
    private static final int COLOR_GREEN = makeColor(0, 100, 0);
    private static final int COLOR_GREY = makeColor(100, 100, 100);

    private static final int BITRATE_1080p = 20000000;
    private static final int BITRATE_720p = 14000000;
    private static final int BITRATE_800x480 = 14000000;
    private static final int BITRATE_DEFAULT = 10000000;

    private static final int IFRAME_INTERVAL = 10;

    private static final int MAX_NUM_WINDOWS = 3;

    private static Handler sHandlerForRunOnMain = new Handler(Looper.getMainLooper());

    private Surface mEncodingSurface;
    private OutputSurface mDecodingSurface;
    private volatile boolean mCodecConfigReceived = false;
    private volatile boolean mCodecBufferReceived = false;
    private EncodingHelper mEncodingHelper;
    private MediaCodec mDecoder;
    private final ByteBuffer mPixelBuf = ByteBuffer.allocateDirect(4);
    private volatile boolean mIsQuitting = false;
    private Throwable mTestException;
    private VirtualDisplayPresentation mLocalPresentation;
    private RemoteVirtualDisplayPresentation mRemotePresentation;
    private ByteBuffer[] mDecoderInputBuffers;

    /** Event listener for tests that do not verify the output. */
    private EncoderEventListener mEncoderEventListener = new EncoderEventListener() {
        @Override
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecConfigReceived = true;
        }
        @Override
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info) {
            mCodecBufferReceived = true;
        }
        @Override
        public void onError(String errorMessage) {
            fail(errorMessage);
        }
    };

    /* TEST_COLORS static initialization; need ARGB for ColorDrawable */
    private static int makeColor(int red, int green, int blue) {
        return 0xff << 24 | (red & 0xff) << 16 | (green & 0xff) << 8 | (blue & 0xff);
    }

    public void testVirtualDisplayRecycles() throws Exception {
        doTestVirtualDisplayRecycles(3);
    }

    public void testRendering800x480Locally() throws Throwable {
        Log.i(TAG, "testRendering800x480Locally");
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, false, false);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480Locally(): codec not supported");
        }
    }

    public void testRenderingMaxResolutionLocally() throws Throwable {
        Log.i(TAG, "testRenderingMaxResolutionLocally");
        Size maxRes = checkMaxConcurrentEncodingDecodingResolution();
        if (maxRes == null) {
            Log.i(TAG, "SKIPPING testRenderingMaxResolutionLocally(): codec not supported");
        } else {
            Log.w(TAG, "Trying resolution " + maxRes);
            runTestRenderingInSeparateThread(maxRes.getWidth(), maxRes.getHeight(), false, false);
        }
    }
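    // Note: the *Remotely variants below drive rendering through a Binder connection to
    // RemoteVirtualDisplayService (see RemoteVirtualDisplayPresentation), while the *Locally
    // variants create the virtual display and Presentation directly in this process.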
    public void testRendering800x480Remotely() throws Throwable {
        Log.i(TAG, "testRendering800x480Remotely");
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, true, false);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480Remotely(): codec not supported");
        }
    }

    public void testRenderingMaxResolutionRemotely() throws Throwable {
        Log.i(TAG, "testRenderingMaxResolutionRemotely");
        Size maxRes = checkMaxConcurrentEncodingDecodingResolution();
        if (maxRes == null) {
            Log.i(TAG, "SKIPPING testRenderingMaxResolutionRemotely(): codec not supported");
        } else {
            Log.w(TAG, "Trying resolution " + maxRes);
            runTestRenderingInSeparateThread(maxRes.getWidth(), maxRes.getHeight(), true, false);
        }
    }

    public void testRendering800x480RemotelyWith3Windows() throws Throwable {
        Log.i(TAG, "testRendering800x480RemotelyWith3Windows");
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, true, true);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480RemotelyWith3Windows(): codec not supported");
        }
    }

    public void testRendering800x480LocallyWith3Windows() throws Throwable {
        Log.i(TAG, "testRendering800x480LocallyWith3Windows");
        if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            runTestRenderingInSeparateThread(800, 480, false, true);
        } else {
            Log.i(TAG, "SKIPPING testRendering800x480LocallyWith3Windows(): codec not supported");
        }
    }

    /**
     * Run the rendering test in a separate thread. This is necessary as {@link OutputSurface}
     * requires constructing it in a non-test thread.
     * @param w video width
     * @param h video height
     * @param runRemotely render via the remote presentation service when true
     * @param multipleWindows composite three windows instead of one when true
     * @throws Throwable
     */
    private void runTestRenderingInSeparateThread(final int w, final int h,
            final boolean runRemotely, final boolean multipleWindows) throws Throwable {
        mTestException = null;
        Thread renderingThread = new Thread(new Runnable() {
            public void run() {
                try {
                    doTestRenderingOutput(w, h, runRemotely, multipleWindows);
                } catch (Throwable t) {
                    t.printStackTrace();
                    mTestException = t;
                }
            }
        });
        renderingThread.start();
        renderingThread.join(60000);
        assertTrue(!renderingThread.isAlive());
        if (mTestException != null) {
            throw mTestException;
        }
    }

    private void doTestRenderingOutput(int w, int h, boolean runRemotely, boolean multipleWindows)
            throws Throwable {
        if (DBG) {
            Log.i(TAG, "doTestRenderingOutput for w:" + w + " h:" + h);
        }
        try {
            mIsQuitting = false;
            mDecoder = MediaCodec.createDecoderByType(MIME_TYPE);
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, w, h);
            mDecodingSurface = new OutputSurface(w, h);
            mDecoder.configure(decoderFormat, mDecodingSurface.getSurface(), null, 0);
            mDecoder.start();
            mDecoderInputBuffers = mDecoder.getInputBuffers();

            mEncodingHelper = new EncodingHelper();
            mEncodingSurface = mEncodingHelper.startEncoding(w, h,
                    new EncoderEventListener() {
                        @Override
                        public void onCodecConfig(ByteBuffer data, BufferInfo info) {
                            if (DBG) {
                                Log.i(TAG, "onCodecConfig l:" + info.size);
                            }
                            handleEncodedData(data, info);
                        }

                        @Override
                        public void onBufferReady(ByteBuffer data, BufferInfo info) {
                            if (DBG) {
                                Log.i(TAG, "onBufferReady l:" + info.size);
                            }
                            handleEncodedData(data, info);
                        }

                        @Override
                        public void onError(String errorMessage) {
                            fail(errorMessage);
                        }

                        private void handleEncodedData(ByteBuffer data, BufferInfo info) {
                            if (mIsQuitting) {
                                if (DBG) {
                                    Log.i(TAG, "ignore data as test is quitting");
                                }
                                return;
                            }
                            int inputBufferIndex =
                                    mDecoder.dequeueInputBuffer(DEFAULT_WAIT_TIMEOUT_US);
                            if (inputBufferIndex < 0) {
                                if (DBG) {
                                    Log.i(TAG, "dequeueInputBuffer returned:" + inputBufferIndex);
                                }
                                return;
                            }
                            assertTrue(inputBufferIndex >= 0);
                            ByteBuffer inputBuffer = mDecoderInputBuffers[inputBufferIndex];
                            inputBuffer.clear();
                            inputBuffer.put(data);
                            mDecoder.queueInputBuffer(inputBufferIndex, 0, info.size,
                                    info.presentationTimeUs, info.flags);
                        }
                    });
            GlCompositor compositor = new GlCompositor();
            if (DBG) {
                Log.i(TAG, "start composition");
            }
            compositor.startComposition(mEncodingSurface, w, h, multipleWindows ? 3 : 1);

            if (DBG) {
                Log.i(TAG, "create display");
            }

            Renderer renderer = null;
            if (runRemotely) {
                mRemotePresentation = new RemoteVirtualDisplayPresentation(getContext(),
                        compositor.getWindowSurface(multipleWindows ? 1 : 0), w, h);
                mRemotePresentation.connect();
                mRemotePresentation.start();
                renderer = mRemotePresentation;
            } else {
                mLocalPresentation = new VirtualDisplayPresentation(getContext(),
                        compositor.getWindowSurface(multipleWindows ? 1 : 0), w, h);
                mLocalPresentation.createVirtualDisplay();
                mLocalPresentation.createPresentation();
                renderer = mLocalPresentation;
            }

            if (DBG) {
                Log.i(TAG, "start rendering and check");
            }
            renderColorAndCheckResult(renderer, w, h, COLOR_RED);
            renderColorAndCheckResult(renderer, w, h, COLOR_BLUE);
            renderColorAndCheckResult(renderer, w, h, COLOR_GREEN);
            renderColorAndCheckResult(renderer, w, h, COLOR_GREY);

            mIsQuitting = true;
            if (runRemotely) {
                mRemotePresentation.disconnect();
            } else {
                mLocalPresentation.dismissPresentation();
                mLocalPresentation.destroyVirtualDisplay();
            }

            compositor.stopComposition();
        } finally {
            if (mEncodingHelper != null) {
                mEncodingHelper.stopEncoding();
                mEncodingHelper = null;
            }
            if (mDecoder != null) {
                mDecoder.stop();
                mDecoder.release();
                mDecoder = null;
            }
            if (mDecodingSurface != null) {
                mDecodingSurface.release();
                mDecodingSurface = null;
            }
        }
    }

    private static final int NUM_MAX_RETRY = 120;
    private static final int IMAGE_WAIT_TIMEOUT_MS = 1000;

    private void renderColorAndCheckResult(Renderer renderer, int w, int h,
            int color) throws Exception {
        BufferInfo info = new BufferInfo();
        for (int i = 0; i < NUM_MAX_RETRY; i++) {
            renderer.doRendering(color);
            int bufferIndex = mDecoder.dequeueOutputBuffer(info, DEFAULT_WAIT_TIMEOUT_US);
            if (DBG) {
                Log.i(TAG, "decoder dequeueOutputBuffer returned " + bufferIndex);
            }
            if (bufferIndex < 0) {
                continue;
            }
            mDecoder.releaseOutputBuffer(bufferIndex, true);
            if (mDecodingSurface.checkForNewImage(IMAGE_WAIT_TIMEOUT_MS)) {
                mDecodingSurface.drawImage();
                if (checkSurfaceFrameColor(w, h, color)) {
                    Log.i(TAG, "color " + Integer.toHexString(color) + " matched");
                    return;
                }
            } else if (DBG) {
                Log.i(TAG, "no rendering yet");
            }
        }
        fail("Color did not match");
    }

    private boolean checkSurfaceFrameColor(int w, int h, int color) {
        // Read a pixel from the center of the surface. Might want to read from multiple points
        // and average them together.
        int x = w / 2;
        int y = h / 2;
        GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, mPixelBuf);
        int r = mPixelBuf.get(0) & 0xff;
        int g = mPixelBuf.get(1) & 0xff;
        int b = mPixelBuf.get(2) & 0xff;

        int redExpected = (color >> 16) & 0xff;
        int greenExpected = (color >> 8) & 0xff;
        int blueExpected = color & 0xff;
        if (approxEquals(redExpected, r) && approxEquals(greenExpected, g)
                && approxEquals(blueExpected, b)) {
            return true;
        }
        Log.i(TAG, "expected 0x" + Integer.toHexString(color) + " got 0x"
                + Integer.toHexString(makeColor(r, g, b)));
        return false;
    }

    /**
     * Determines if two color values are approximately equal.
     */
    private static boolean approxEquals(int expected, int actual) {
        final int MAX_DELTA = 4;
        return Math.abs(expected - actual) <= MAX_DELTA;
    }

    private static final int NUM_CODEC_CREATION = 5;
    private static final int NUM_DISPLAY_CREATION = 10;
    private static final int NUM_RENDERING = 10;

    private void doTestVirtualDisplayRecycles(int numDisplays) throws Exception {
        Size maxSize = getMaxSupportedEncoderSize();
        if (maxSize == null) {
            Log.i(TAG, "no codec found, skipping");
            return;
        }
        VirtualDisplayPresentation[] virtualDisplays = new VirtualDisplayPresentation[numDisplays];
        for (int i = 0; i < NUM_CODEC_CREATION; i++) {
            mCodecConfigReceived = false;
            mCodecBufferReceived = false;
            if (DBG) {
                Log.i(TAG, "start encoding");
            }
            EncodingHelper encodingHelper = new EncodingHelper();
            mEncodingSurface = encodingHelper.startEncoding(maxSize.getWidth(), maxSize.getHeight(),
                    mEncoderEventListener);
            GlCompositor compositor = new GlCompositor();
            if (DBG) {
                Log.i(TAG, "start composition");
            }
            compositor.startComposition(mEncodingSurface, maxSize.getWidth(), maxSize.getHeight(),
                    numDisplays);
            for (int j = 0; j < NUM_DISPLAY_CREATION; j++) {
                if (DBG) {
                    Log.i(TAG, "create display");
                }
                for (int k = 0; k < numDisplays; k++) {
                    virtualDisplays[k] =
                            new VirtualDisplayPresentation(getContext(),
                                    compositor.getWindowSurface(k),
                                    maxSize.getWidth() / numDisplays, maxSize.getHeight());
                    virtualDisplays[k].createVirtualDisplay();
                    virtualDisplays[k].createPresentation();
                }
                if (DBG) {
                    Log.i(TAG, "start rendering");
                }
                for (int k = 0; k < NUM_RENDERING; k++) {
                    for (int l = 0; l < numDisplays; l++) {
                        virtualDisplays[l].doRendering(COLOR_RED);
                    }
                    // do not care how many frames are actually rendered.
                    Thread.sleep(1);
                }
                for (int k = 0; k < numDisplays; k++) {
                    virtualDisplays[k].dismissPresentation();
                    virtualDisplays[k].destroyVirtualDisplay();
                }
                compositor.recreateWindows();
            }
            if (DBG) {
                Log.i(TAG, "stop composition");
            }
            compositor.stopComposition();
            if (DBG) {
                Log.i(TAG, "stop encoding");
            }
            encodingHelper.stopEncoding();
            assertTrue(mCodecConfigReceived);
            assertTrue(mCodecBufferReceived);
        }
    }

    interface EncoderEventListener {
        public void onCodecConfig(ByteBuffer data, MediaCodec.BufferInfo info);
        public void onBufferReady(ByteBuffer data, MediaCodec.BufferInfo info);
        public void onError(String errorMessage);
    }

    private class EncodingHelper {
        private MediaCodec mEncoder;
        private volatile boolean mStopEncoding = false;
        private EncoderEventListener mEventListener;
        private int mW;
        private int mH;
        private Thread mEncodingThread;
        private Surface mEncodingSurface;
        private Semaphore mInitCompleted = new Semaphore(0);

        Surface startEncoding(int w, int h, EncoderEventListener eventListener) {
            mStopEncoding = false;
            mW = w;
            mH = h;
            mEventListener = eventListener;
            mEncodingThread = new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        doEncoding();
                    } catch (Exception e) {
                        e.printStackTrace();
                        mEventListener.onError(e.toString());
                    }
                }
            });
            mEncodingThread.start();
            try {
                if (DBG) {
                    Log.i(TAG, "wait for encoder init");
                }
                mInitCompleted.acquire();
                if (DBG) {
                    Log.i(TAG, "wait for encoder done");
                }
            } catch (InterruptedException e) {
                fail("should not happen");
            }
            return mEncodingSurface;
        }

        void stopEncoding() {
            try {
                mStopEncoding = true;
                mEncodingThread.join();
            } catch (InterruptedException e) {
                // just ignore
            } finally {
                mEncodingThread = null;
            }
        }

        private void doEncoding() throws Exception {
            final int TIMEOUT_USEC_NORMAL = 1000000;
            MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mW, mH);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            int bitRate = BITRATE_DEFAULT;
            if (mW == 1920 && mH == 1080) {
                bitRate = BITRATE_1080p;
            } else if (mW == 1280 && mH == 720) {
                bitRate = BITRATE_720p;
            } else if (mW == 800 && mH == 480) {
                bitRate = BITRATE_800x480;
            }
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
            mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mEncodingSurface = mEncoder.createInputSurface();
            mEncoder.start();
            mInitCompleted.release();
            if (DBG) {
                Log.i(TAG, "starting encoder");
            }
            try {
                ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
                MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
                while (!mStopEncoding) {
                    int index = mEncoder.dequeueOutputBuffer(info, TIMEOUT_USEC_NORMAL);
                    if (DBG) {
                        Log.i(TAG, "encoder dequeueOutputBuffer returned " + index);
                    }
                    if (index >= 0) {
                        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            Log.i(TAG, "codec config data");
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onCodecConfig(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        } else if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            Log.i(TAG, "EOS, stopping encoding");
                            break;
                        } else {
                            ByteBuffer encodedData = encoderOutputBuffers[index];
                            encodedData.position(info.offset);
                            encodedData.limit(info.offset + info.size);
                            mEventListener.onBufferReady(encodedData, info);
                            mEncoder.releaseOutputBuffer(index, false);
                        }
                    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        Log.i(TAG, "output buffer changed");
                        encoderOutputBuffers = mEncoder.getOutputBuffers();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
                throw e;
            } finally {
                mEncoder.stop();
                mEncoder.release();
                mEncoder = null;
                mEncodingSurface.release();
                mEncodingSurface = null;
            }
        }
    }

    /**
     * Handles composition of multiple SurfaceTexture into a single Surface.
     */
    private class GlCompositor implements SurfaceTexture.OnFrameAvailableListener {
        private Surface mSurface;
        private int mWidth;
        private int mHeight;
        private volatile int mNumWindows;
        private GlWindow mTopWindow;
        private Thread mCompositionThread;
        private Semaphore mStartCompletionSemaphore;
        private Semaphore mRecreationCompletionSemaphore;
        private Looper mLooper;
        private Handler mHandler;
        private InputSurface mEglHelper;
        private int mGlProgramId = 0;
        private int mGluMVPMatrixHandle;
        private int mGluSTMatrixHandle;
        private int mGlaPositionHandle;
        private int mGlaTextureHandle;
        private float[] mMVPMatrix = new float[16];
        private TopWindowVirtualDisplayPresentation mTopPresentation;

        private static final String VERTEX_SHADER =
                "uniform mat4 uMVPMatrix;\n" +
                "uniform mat4 uSTMatrix;\n" +
                "attribute vec4 aPosition;\n" +
                "attribute vec4 aTextureCoord;\n" +
                "varying vec2 vTextureCoord;\n" +
                "void main() {\n" +
                "  gl_Position = uMVPMatrix * aPosition;\n" +
                "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" +
                "}\n";

        private static final String FRAGMENT_SHADER =
                "#extension GL_OES_EGL_image_external : require\n" +
                "precision mediump float;\n" +
                "varying vec2 vTextureCoord;\n" +
                "uniform samplerExternalOES sTexture;\n" +
                "void main() {\n" +
                "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n" +
                "}\n";

        void startComposition(Surface surface, int w, int h, int numWindows) throws Exception {
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mNumWindows = numWindows;
            mCompositionThread = new Thread(new CompositionRunnable());
            mStartCompletionSemaphore = new Semaphore(0);
            mCompositionThread.start();
            waitForStartCompletion();
        }

        void stopComposition() {
            try {
                if (mLooper != null) {
                    mLooper.quit();
                    mCompositionThread.join();
                }
            } catch (InterruptedException e) {
                // don't care
            }
            mCompositionThread = null;
            mSurface = null;
            mStartCompletionSemaphore = null;
        }

        Surface getWindowSurface(int windowIndex) {
            return mTopPresentation.getSurface(windowIndex);
        }

        void recreateWindows() throws Exception {
            mRecreationCompletionSemaphore = new Semaphore(0);
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RECREATE_WINDOWS);
            mHandler.sendMessage(msg);
            if (!mRecreationCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("recreation timeout");
            }
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }

        @Override
        public void onFrameAvailable(SurfaceTexture surface) {
            if (DBG) {
                Log.i(TAG, "onFrameAvailable " + surface);
            }
            GlWindow w = mTopWindow;
            if (w != null) {
                w.markTextureUpdated();
                requestUpdate();
            } else {
                Log.w(TAG, "top window gone");
            }
        }

        private void requestUpdate() {
            Thread compositionThread = mCompositionThread;
            if (compositionThread == null || !compositionThread.isAlive()) {
                return;
            }
            Message msg = mHandler.obtainMessage(CompositionHandler.DO_RENDERING);
            mHandler.sendMessage(msg);
        }

        private int loadShader(int shaderType, String source) throws GlException {
            int shader = GLES20.glCreateShader(shaderType);
            checkGlError("glCreateShader type=" + shaderType);
            GLES20.glShaderSource(shader, source);
            GLES20.glCompileShader(shader);
            int[] compiled = new int[1];
            GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
            if (compiled[0] == 0) {
                Log.e(TAG, "Could not compile shader " + shaderType + ":");
                Log.e(TAG, " " + GLES20.glGetShaderInfoLog(shader));
                GLES20.glDeleteShader(shader);
                shader = 0;
            }
            return shader;
        }

        private int createProgram(String vertexSource, String fragmentSource) throws GlException {
            int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
            if (vertexShader == 0) {
                return 0;
            }
            int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
            if (pixelShader == 0) {
                return 0;
            }

            int program = GLES20.glCreateProgram();
            checkGlError("glCreateProgram");
            if (program == 0) {
                Log.e(TAG, "Could not create program");
            }
            GLES20.glAttachShader(program, vertexShader);
            checkGlError("glAttachShader");
            GLES20.glAttachShader(program, pixelShader);
            checkGlError("glAttachShader");
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
            return program;
        }

        private void initGl() throws GlException {
            mEglHelper = new InputSurface(mSurface);
            mEglHelper.makeCurrent();
            mGlProgramId = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
            mGlaPositionHandle = GLES20.glGetAttribLocation(mGlProgramId, "aPosition");
            checkGlError("glGetAttribLocation aPosition");
            if (mGlaPositionHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aPosition");
            }
            mGlaTextureHandle = GLES20.glGetAttribLocation(mGlProgramId, "aTextureCoord");
            checkGlError("glGetAttribLocation aTextureCoord");
            if (mGlaTextureHandle == -1) {
                throw new RuntimeException("Could not get attrib location for aTextureCoord");
            }
            mGluMVPMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uMVPMatrix");
            checkGlError("glGetUniformLocation uMVPMatrix");
            if (mGluMVPMatrixHandle == -1) {
                throw new RuntimeException("Could not get attrib location for uMVPMatrix");
            }
            mGluSTMatrixHandle = GLES20.glGetUniformLocation(mGlProgramId, "uSTMatrix");
            checkGlError("glGetUniformLocation uSTMatrix");
            if (mGluSTMatrixHandle == -1) {
                throw new RuntimeException("Could not get attrib location for uSTMatrix");
            }
            Matrix.setIdentityM(mMVPMatrix, 0);
            Log.i(TAG, "initGl w:" + mWidth + " h:" + mHeight);
            GLES20.glViewport(0, 0, mWidth, mHeight);
            float[] vMatrix = new float[16];
            float[] projMatrix = new float[16];
            // max window is from (0,0) to (mWidth - 1, mHeight - 1)
            float wMid = mWidth / 2f;
            float hMid = mHeight / 2f;
            // look from positive z to hide windows in lower z
            Matrix.setLookAtM(vMatrix, 0, wMid, hMid, 5f, wMid, hMid, 0f, 0f, 1.0f, 0.0f);
            Matrix.orthoM(projMatrix, 0, -wMid, wMid, -hMid, hMid, 1, 10);
            Matrix.multiplyMM(mMVPMatrix, 0, projMatrix, 0, vMatrix, 0);
            createWindows();
        }

        private void createWindows() throws GlException {
            mTopWindow = new GlWindow(this, 0, 0, mWidth, mHeight);
            mTopWindow.init();
            mTopPresentation = new TopWindowVirtualDisplayPresentation(mContext,
                    mTopWindow.getSurface(), mWidth, mHeight, mNumWindows);
            mTopPresentation.createVirtualDisplay();
            mTopPresentation.createPresentation();
            ((TopWindowPresentation) mTopPresentation.getPresentation()).populateWindows();
        }

        private void cleanupGl() {
            if (mTopPresentation != null) {
                mTopPresentation.dismissPresentation();
                mTopPresentation.destroyVirtualDisplay();
                mTopPresentation = null;
            }
            if (mTopWindow != null) {
                mTopWindow.cleanup();
                mTopWindow = null;
            }
            if (mEglHelper != null) {
                mEglHelper.release();
                mEglHelper = null;
            }
        }

        private void doGlRendering() throws GlException {
            if (DBG) {
                Log.i(TAG, "doGlRendering");
            }
            mTopWindow.updateTexImageIfNecessary();
            GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
            GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

            GLES20.glUseProgram(mGlProgramId);
            GLES20.glUniformMatrix4fv(mGluMVPMatrixHandle, 1, false, mMVPMatrix, 0);
            mTopWindow.onDraw(mGluSTMatrixHandle, mGlaPositionHandle, mGlaTextureHandle);
            checkGlError("window draw");
            if (DBG) {
                final IntBuffer pixels = IntBuffer.allocate(1);
                GLES20.glReadPixels(mWidth / 2, mHeight / 2, 1, 1,
                        GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixels);
                Log.i(TAG, "glReadPixels returned 0x" + Integer.toHexString(pixels.get(0)));
            }
            mEglHelper.swapBuffers();
        }

        private void doRecreateWindows() throws GlException {
            mTopPresentation.dismissPresentation();
            mTopPresentation.destroyVirtualDisplay();
            mTopWindow.cleanup();
            createWindows();
            mRecreationCompletionSemaphore.release();
        }

        private void waitForStartCompletion() throws Exception {
            if (!mStartCompletionSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS,
                    TimeUnit.MILLISECONDS)) {
                fail("start timeout");
            }
            mStartCompletionSemaphore = null;
            mTopPresentation.waitForSurfaceReady(DEFAULT_WAIT_TIMEOUT_MS);
        }
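        // Composition runs on a dedicated Looper thread: CompositionRunnable creates the EGL
        // context in initGl(), and all later GL work (rendering, window recreation) is posted to
        // CompositionHandler so every GL call stays on the thread that owns the context.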
        private class CompositionRunnable implements Runnable {
            @Override
            public void run() {
                try {
                    Looper.prepare();
                    mLooper = Looper.myLooper();
                    mHandler = new CompositionHandler();
                    initGl();
                    // init done
                    mStartCompletionSemaphore.release();
                    Looper.loop();
                } catch (GlException e) {
                    e.printStackTrace();
                    fail("got gl exception");
                } finally {
                    cleanupGl();
                    mHandler = null;
                    mLooper = null;
                }
            }
        }

        private class CompositionHandler extends Handler {
            private static final int DO_RENDERING = 1;
            private static final int DO_RECREATE_WINDOWS = 2;

            @Override
            public void handleMessage(Message msg) {
                try {
                    switch (msg.what) {
                        case DO_RENDERING: {
                            doGlRendering();
                        } break;
                        case DO_RECREATE_WINDOWS: {
                            doRecreateWindows();
                        } break;
                    }
                } catch (GlException e) {
                    // ignore as this can happen during tearing down
                }
            }
        }

        private class GlWindow {
            private static final int FLOAT_SIZE_BYTES = 4;
            private static final int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 5 * FLOAT_SIZE_BYTES;
            private static final int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0;
            private static final int TRIANGLE_VERTICES_DATA_UV_OFFSET = 3;
            private int mBlX;
            private int mBlY;
            private int mWidth;
            private int mHeight;
            private int mTextureId = 0; // 0 is invalid
            private volatile SurfaceTexture mSurfaceTexture;
            private volatile Surface mSurface;
            private FloatBuffer mVerticesData;
            private float[] mSTMatrix = new float[16];
            private AtomicInteger mNumTextureUpdated = new AtomicInteger(0);
            private GlCompositor mCompositor;

            /**
             * @param blX X coordinate of the bottom-left point of the window
             * @param blY Y coordinate of the bottom-left point of the window
             * @param w window width
             * @param h window height
             */
            public GlWindow(GlCompositor compositor, int blX, int blY, int w, int h) {
                mCompositor = compositor;
                mBlX = blX;
                mBlY = blY;
                mWidth = w;
                mHeight = h;
                int trX = blX + w;
                int trY = blY + h;
                float[] vertices = new float[] {
                    // x, y, z, u, v
                    mBlX, mBlY, 0, 0, 0,
                    trX, mBlY, 0, 1, 0,
                    mBlX, trY, 0, 0, 1,
                    trX, trY, 0, 1, 1
                };
                Log.i(TAG, "create window " + this + " blX:" + mBlX + " blY:" + mBlY + " trX:" +
                        trX + " trY:" + trY);
                mVerticesData = ByteBuffer.allocateDirect(
                        vertices.length * FLOAT_SIZE_BYTES)
                        .order(ByteOrder.nativeOrder()).asFloatBuffer();
                mVerticesData.put(vertices).position(0);
            }

            /**
             * Initialize the window for composition; the counterpart is cleanup().
             * @throws GlException
             */
            public void init() throws GlException {
                int[] textures = new int[1];
                GLES20.glGenTextures(1, textures, 0);

                mTextureId = textures[0];
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                checkGlError("glBindTexture mTextureID");

                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
                        GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                        GLES20.GL_CLAMP_TO_EDGE);
                GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                        GLES20.GL_CLAMP_TO_EDGE);
                checkGlError("glTexParameter");
                mSurfaceTexture = new SurfaceTexture(mTextureId);
                mSurfaceTexture.setDefaultBufferSize(mWidth, mHeight);
                mSurface = new Surface(mSurfaceTexture);
                mSurfaceTexture.setOnFrameAvailableListener(mCompositor);
            }

            public void cleanup() {
                mNumTextureUpdated.set(0);
                if (mTextureId != 0) {
                    int[] textures = new int[] {
                        mTextureId
                    };
                    GLES20.glDeleteTextures(1, textures, 0);
                }
                GLES20.glFinish();
                if (mSurface != null) {
                    mSurface.release();
                    mSurface = null;
                }
                if (mSurfaceTexture != null) {
                    mSurfaceTexture.release();
                    mSurfaceTexture = null;
                }
            }

            /**
             * Mark the texture as updated so that it is refreshed in the next rendering.
             */
            public void markTextureUpdated() {
                mNumTextureUpdated.incrementAndGet();
            }

            /**
             * Update the texture for rendering if it has been updated.
             */
            public void updateTexImageIfNecessary() {
                int numTextureUpdated = mNumTextureUpdated.getAndDecrement();
                if (numTextureUpdated > 0) {
                    if (DBG) {
                        Log.i(TAG, "updateTexImageIfNecessary " + this);
                    }
                    mSurfaceTexture.updateTexImage();
                    mSurfaceTexture.getTransformMatrix(mSTMatrix);
                }
                if (numTextureUpdated < 0) {
                    fail("should not happen");
                }
            }

            /**
             * Draw the window. It will not be drawn at all if the window is not visible.
             * @param uSTMatrixHandle shader handle for the STMatrix for texture coordinate
             *        mapping
             * @param aPositionHandle shader handle for vertex position.
             * @param aTextureHandle shader handle for texture
             */
            public void onDraw(int uSTMatrixHandle, int aPositionHandle, int aTextureHandle) {
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
                GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
                mVerticesData.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
                GLES20.glVertexAttribPointer(aPositionHandle, 3, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aPositionHandle);

                mVerticesData.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
                GLES20.glVertexAttribPointer(aTextureHandle, 2, GLES20.GL_FLOAT, false,
                        TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mVerticesData);
                GLES20.glEnableVertexAttribArray(aTextureHandle);
                GLES20.glUniformMatrix4fv(uSTMatrixHandle, 1, false, mSTMatrix, 0);
                GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
            }

            public SurfaceTexture getSurfaceTexture() {
                return mSurfaceTexture;
            }

            public Surface getSurface() {
                return mSurface;
            }
        }
    }

    static void checkGlError(String op) throws GlException {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e(TAG, op + ": glError " + error);
            throw new GlException(op + ": glError " + error);
        }
    }

    public static class GlException extends Exception {
        public GlException(String msg) {
            super(msg);
        }
    }

    private interface Renderer {
        void doRendering(final int color) throws Exception;
    }

    private static class VirtualDisplayPresentation implements Renderer {
        protected final Context mContext;
        protected final Surface mSurface;
        protected final int mWidth;
        protected final int mHeight;
        protected VirtualDisplay mVirtualDisplay;
        protected TestPresentationBase mPresentation;
        private final DisplayManager mDisplayManager;

        VirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
            mDisplayManager = (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE);
        }

        void createVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay = mDisplayManager.createVirtualDisplay(
                            TAG, mWidth, mHeight, 200, mSurface,
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_OWN_CONTENT_ONLY |
                            DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION);
                }
            });
        }

        void destroyVirtualDisplay() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mVirtualDisplay.release();
                }
            });
        }

        void createPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation = doCreatePresentation();
                    mPresentation.show();
                }
            });
        }

        protected TestPresentationBase doCreatePresentation() {
            return new TestPresentation(mContext, mVirtualDisplay.getDisplay());
        }

        TestPresentationBase getPresentation() {
            return mPresentation;
        }

        void dismissPresentation() {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.dismiss();
                }
            });
        }

        @Override
        public void doRendering(final int color) throws Exception {
            runOnMainSync(new Runnable() {
                @Override
                public void run() {
                    mPresentation.doRendering(color);
                }
            });
        }
    }

    private static class TestPresentationBase extends Presentation {

        public TestPresentationBase(Context outerContext, Display display) {
            // This theme is required to prevent an extra view from obscuring the presentation
            super(outerContext, display,
                    android.R.style.Theme_Holo_Light_NoActionBar_TranslucentDecor);
            getWindow().setType(WindowManager.LayoutParams.TYPE_PRIVATE_PRESENTATION);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_LOCAL_FOCUS_MODE);
            getWindow().addFlags(WindowManager.LayoutParams.FLAG_HARDWARE_ACCELERATED);
        }

        public void doRendering(int color) {
            // to be implemented by child
        }
    }

    private static class TestPresentation extends TestPresentationBase {
        private ImageView mImageView;

        public TestPresentation(Context outerContext, Display display) {
            super(outerContext, display);
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            mImageView = new ImageView(getContext());
            mImageView.setImageDrawable(new ColorDrawable(COLOR_RED));
            mImageView.setLayoutParams(new LayoutParams(
                    LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT));
            setContentView(mImageView);
        }

        public void doRendering(int color) {
            if (DBG) {
                Log.i(TAG, "doRendering " + Integer.toHexString(color));
            }
            mImageView.setImageDrawable(new ColorDrawable(color));
        }
    }

    private static class TopWindowPresentation extends TestPresentationBase {
        private FrameLayout[] mWindowsLayout = new FrameLayout[MAX_NUM_WINDOWS];
        private CompositionTextureView[] mWindows = new CompositionTextureView[MAX_NUM_WINDOWS];
        private final int mNumWindows;
        private final Semaphore mWindowWaitSemaphore = new Semaphore(0);

        public TopWindowPresentation(int numWindows, Context outerContext, Display display) {
            super(outerContext, display);
            mNumWindows = numWindows;
        }

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            if (DBG) {
                Log.i(TAG, "TopWindowPresentation onCreate, numWindows " + mNumWindows);
            }
            setContentView(R.layout.composition_layout);
            mWindowsLayout[0] = (FrameLayout) findViewById(R.id.window0);
            mWindowsLayout[1] = (FrameLayout) findViewById(R.id.window1);
            mWindowsLayout[2] = (FrameLayout) findViewById(R.id.window2);
        }

        public void populateWindows() {
            runOnMain(new Runnable() {
                public void run() {
                    for (int i = 0; i < mNumWindows; i++) {
                        mWindows[i] = new CompositionTextureView(getContext());
                        mWindows[i].setLayoutParams(new ViewGroup.LayoutParams(
                                ViewGroup.LayoutParams.MATCH_PARENT,
                                ViewGroup.LayoutParams.MATCH_PARENT));
                        mWindowsLayout[i].setVisibility(View.VISIBLE);
                        mWindowsLayout[i].addView(mWindows[i]);
                        mWindows[i].startListening();
                    }
                    mWindowWaitSemaphore.release();
                }
            });
        }

        public void waitForSurfaceReady(long timeoutMs) throws Exception {
            mWindowWaitSemaphore.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS);
            for (int i = 0; i < mNumWindows; i++) {
                if (!mWindows[i].waitForSurfaceReady(timeoutMs)) {
                    fail("surface wait timeout");
                }
            }
        }

        public Surface getSurface(int windowIndex) {
            Surface surface = mWindows[windowIndex].getSurface();
            assertNotNull(surface);
            return surface;
        }
    }

    private static class TopWindowVirtualDisplayPresentation extends VirtualDisplayPresentation {
        private final int mNumWindows;

        TopWindowVirtualDisplayPresentation(Context context, Surface surface, int w, int h,
                int numWindows) {
            super(context, surface, w, h);
            assertNotNull(surface);
            mNumWindows = numWindows;
        }

        void waitForSurfaceReady(long timeoutMs) throws Exception {
            ((TopWindowPresentation) mPresentation).waitForSurfaceReady(timeoutMs);
        }

        Surface getSurface(int windowIndex) {
            return ((TopWindowPresentation) mPresentation).getSurface(windowIndex);
        }

        protected TestPresentationBase doCreatePresentation() {
            return new TopWindowPresentation(mNumWindows, mContext, mVirtualDisplay.getDisplay());
        }
    }

    private static class RemoteVirtualDisplayPresentation implements Renderer {
        /** argument: Surface, int w, int h, return none */
        private static final int BINDER_CMD_START = IBinder.FIRST_CALL_TRANSACTION;
        /** argument: int color, return none */
        private static final int BINDER_CMD_RENDER = IBinder.FIRST_CALL_TRANSACTION + 1;

        private final Context mContext;
        private final Surface mSurface;
        private final int mWidth;
        private final int mHeight;

        private IBinder mService;
        private final Semaphore mConnectionWait = new Semaphore(0);
        private final ServiceConnection mConnection = new ServiceConnection() {

            public void onServiceConnected(ComponentName arg0, IBinder arg1) {
                mService = arg1;
                mConnectionWait.release();
            }

            public void onServiceDisconnected(ComponentName arg0) {
                // ignore
            }
        };

        RemoteVirtualDisplayPresentation(Context context, Surface surface, int w, int h) {
            mContext = context;
            mSurface = surface;
            mWidth = w;
            mHeight = h;
        }

        void connect() throws Exception {
            Intent intent = new Intent();
            intent.setClassName("com.android.cts.media",
                    "android.media.cts.RemoteVirtualDisplayService");
            mContext.bindService(intent, mConnection, Context.BIND_AUTO_CREATE);
            if (!mConnectionWait.tryAcquire(DEFAULT_WAIT_TIMEOUT_MS, TimeUnit.MILLISECONDS)) {
                fail("cannot bind to service");
            }
        }

        void disconnect() {
            mContext.unbindService(mConnection);
        }
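        // The remote presentation speaks a raw Binder protocol: start() parcels the Surface and
        // the width/height for BINDER_CMD_START, and doRendering() parcels the color for
        // BINDER_CMD_RENDER.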
        void start() throws Exception {
            Parcel parcel = Parcel.obtain();
            mSurface.writeToParcel(parcel, 0);
            parcel.writeInt(mWidth);
            parcel.writeInt(mHeight);
            mService.transact(BINDER_CMD_START, parcel, null, 0);
        }

        @Override
        public void doRendering(int color) throws Exception {
            Parcel parcel = Parcel.obtain();
            parcel.writeInt(color);
            mService.transact(BINDER_CMD_RENDER, parcel, null, 0);
        }
    }

    private static Size getMaxSupportedEncoderSize() {
        final Size[] standardSizes = new Size[] {
            new Size(1920, 1080),
            new Size(1280, 720),
            new Size(720, 480),
            new Size(352, 576)
        };

        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (Size sz : standardSizes) {
            MediaFormat format = MediaFormat.createVideoFormat(
                    MIME_TYPE, sz.getWidth(), sz.getHeight());
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 15); // require at least 15fps
            if (mcl.findEncoderForFormat(format) != null) {
                return sz;
            }
        }
        return null;
    }

    /**
     * Check the maximum concurrent encoding / decoding resolution allowed.
     * Some H/W cannot support the maximum resolution reported by the encoder if a decoder is
     * running at the same time.
     * The check is done for 4 different levels: 1080p, 720p, 800x480, 480p
     * (The last one is required by CDD.)
     */
    private Size checkMaxConcurrentEncodingDecodingResolution() {
        if (isConcurrentEncodingDecodingSupported(1920, 1080, BITRATE_1080p)) {
            return new Size(1920, 1080);
        } else if (isConcurrentEncodingDecodingSupported(1280, 720, BITRATE_720p)) {
            return new Size(1280, 720);
        } else if (isConcurrentEncodingDecodingSupported(800, 480, BITRATE_800x480)) {
            return new Size(800, 480);
        } else if (isConcurrentEncodingDecodingSupported(720, 480, BITRATE_DEFAULT)) {
            return new Size(720, 480);
        }
        Log.i(TAG, "SKIPPING test: concurrent encoding and decoding is not supported");
        return null;
    }

    private boolean isConcurrentEncodingDecodingSupported(int w, int h, int bitRate) {
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        MediaFormat testFormat = MediaFormat.createVideoFormat(MIME_TYPE, w, h);
        testFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
        if (mcl.findDecoderForFormat(testFormat) == null
                || mcl.findEncoderForFormat(testFormat) == null) {
            return false;
        }

        MediaCodec decoder = null;
        OutputSurface decodingSurface = null;
        MediaCodec encoder = null;
        Surface encodingSurface = null;
        try {
            decoder = MediaCodec.createDecoderByType(MIME_TYPE);
            MediaFormat decoderFormat = MediaFormat.createVideoFormat(MIME_TYPE, w, h);
            decodingSurface = new OutputSurface(w, h);
            decodingSurface.makeCurrent();
            decoder.configure(decoderFormat, decodingSurface.getSurface(), null, 0);
            decoder.start();

            MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, w, h);
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
            format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            encoder = MediaCodec.createEncoderByType(MIME_TYPE);
            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encodingSurface = encoder.createInputSurface();
            encoder.start();

            encoder.stop();
            decoder.stop();
        } catch (Exception e) {
            e.printStackTrace();
            Log.i(TAG, "This H/W does not support w:" + w + " h:" + h);
            return false;
        } finally {
            if (encodingSurface != null) {
                encodingSurface.release();
            }
            if (encoder != null) {
                encoder.release();
            }
            if (decoder != null) {
                decoder.release();
            }
            if (decodingSurface != null) {
                decodingSurface.release();
            }
        }
        return true;
    }

    private static void runOnMain(Runnable runner) {
        sHandlerForRunOnMain.post(runner);
    }

    private static void runOnMainSync(Runnable runner) {
        SyncRunnable sr = new SyncRunnable(runner);
        sHandlerForRunOnMain.post(sr);
        sr.waitForComplete();
    }

    private static final class SyncRunnable implements Runnable {
        private final Runnable mTarget;
        private boolean mComplete;

        public SyncRunnable(Runnable target) {
            mTarget = target;
        }

        public void run() {
            mTarget.run();
            synchronized (this) {
                mComplete = true;
                notifyAll();
            }
        }

        public void waitForComplete() {
            synchronized (this) {
                while (!mComplete) {
                    try {
                        wait();
                    } catch (InterruptedException e) {
                        // ignore
                    }
                }
            }
        }
    }
}