/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
//#define LOG_NDEBUG 0
#define LOG_TAG "GraphicsTracker_test"
#include <unistd.h>

#include <android/hardware_buffer.h>
#include <codec2/aidl/GraphicsTracker.h>
#include <binder/IPCThreadState.h>
#include <binder/IServiceManager.h>
#include <binder/ProcessState.h>
#include <gtest/gtest.h>
#include <gui/BufferQueue.h>
#include <gui/IProducerListener.h>
#include <gui/IConsumerListener.h>
#include <gui/Surface.h>
#include <private/android/AHardwareBufferHelpers.h>

#include <C2BlockInternal.h>
#include <C2FenceFactory.h>

#include <atomic>
#include <condition_variable>
#include <iostream>
#include <memory>
#include <mutex>
#include <queue>
#include <thread>
#include <vector>

using ::aidl::android::hardware::media::c2::implementation::GraphicsTracker;
using ::android::BufferItem;
using ::android::BufferQueue;
using ::android::Fence;
using ::android::GraphicBuffer;
using ::android::IGraphicBufferProducer;
using ::android::IGraphicBufferConsumer;
using ::android::IProducerListener;
using ::android::IConsumerListener;
using ::android::OK;
using ::android::sp;
using ::android::wp;

namespace {
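// Tally of buffer-queue events observed during a test. Counters are bumped
// from the test body and from the listeners, hence the atomics.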
struct BqStatistics {
    std::atomic<int> mDequeued;
    std::atomic<int> mQueued;
    std::atomic<int> mBlocked;
    std::atomic<int> mDropped;
    std::atomic<int> mDiscarded;
    std::atomic<int> mReleased;

    void log() {
        ALOGD("Dequeued: %d, Queued: %d, Blocked: %d, "
              "Dropped: %d, Discarded %d, Released %d",
              (int)mDequeued, (int)mQueued, (int)mBlocked,
              (int)mDropped, (int)mDiscarded, (int)mReleased);
    }

    void clear() {
        mDequeued = 0;
        mQueued = 0;
        mBlocked = 0;
        mDropped = 0;
        mDiscarded = 0;
        mReleased = 0;
    }
};

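// Consumer listener that ignores every callback; used by tests that drive the
// consumer side of the BufferQueue explicitly.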
struct DummyConsumerListener : public android::BnConsumerListener {
    void onFrameAvailable(const BufferItem& /* item */) override {}
    void onBuffersReleased() override {}
    void onSidebandStreamChanged() override {}
};

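// Consumer listener that acquires each available frame, sleeps roughly one
// 30fps frame interval to emulate rendering, and then releases it.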
struct TestConsumerListener : public android::BnConsumerListener {
    TestConsumerListener(const sp<IGraphicBufferConsumer> &consumer)
            : BnConsumerListener(), mConsumer(consumer) {}
    void onFrameAvailable(const BufferItem&) override {
        constexpr static int kRenderDelayUs = 1000000/30; // 30fps
        BufferItem buffer;
        // consume buffer
        sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
        if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == android::NO_ERROR) {
            ::usleep(kRenderDelayUs);
            consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber,
                                    EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, buffer.mFence);
        }
    }
    void onBuffersReleased() override {}
    void onSidebandStreamChanged() override {}

    wp<IGraphicBufferConsumer> mConsumer;
};

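// Producer listener that forwards onBufferReleased() to the GraphicsTracker
// for the given buffer-queue generation and counts the releases.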
struct TestProducerListener : public android::BnProducerListener {
    TestProducerListener(std::shared_ptr<GraphicsTracker> tracker,
                         std::shared_ptr<BqStatistics> &stat,
                         uint32_t generation) : BnProducerListener(),
        mTracker(tracker), mStat(stat), mGeneration(generation) {}
    virtual void onBufferReleased() override {
        auto tracker = mTracker.lock();
        if (tracker) {
            mStat->mReleased++;
            tracker->onReleased(mGeneration);
        }
    }
    virtual bool needsReleaseNotify() override { return true; }
    virtual void onBuffersDiscarded(const std::vector<int32_t>&) override {}

    std::weak_ptr<GraphicsTracker> mTracker;
    std::shared_ptr<BqStatistics> mStat;
    uint32_t mGeneration;
};

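// Owns one allocated AHardwareBuffer together with its fence; the buffer is
// released when the Frame is destroyed.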
struct Frame {
    AHardwareBuffer *buffer_;
    sp<Fence> fence_;

    Frame() : buffer_{nullptr}, fence_{nullptr} {}
    Frame(AHardwareBuffer *buffer, sp<Fence> fence)
            : buffer_(buffer), fence_(fence) {}
    ~Frame() {
        if (buffer_) {
            AHardwareBuffer_release(buffer_);
        }
    }
};

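// Thread-safe FIFO used to hand allocated frames from the test body to the
// rendering thread. stop(true) lets the consumer drain what is already queued.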
struct FrameQueue {
    bool mStopped;
    bool mDrain;
    std::queue<std::shared_ptr<Frame>> mQueue;
    std::mutex mMutex;
    std::condition_variable mCond;

    FrameQueue() : mStopped{false}, mDrain{false} {}

    bool queueItem(AHardwareBuffer *buffer, sp<Fence> fence) {
        std::shared_ptr<Frame> frame = std::make_shared<Frame>(buffer, fence);
        if (mStopped) {
            return false;
        }
        if (!frame) {
            return false;
        }
        std::unique_lock<std::mutex> l(mMutex);
        mQueue.emplace(frame);
        l.unlock();
        mCond.notify_all();
        return true;
    }

    void stop(bool drain = false) {
        bool stopped = false;
        {
            std::unique_lock<std::mutex> l(mMutex);
            if (!mStopped) {
                mStopped = true;
                mDrain = drain;
                stopped = true;
            }
            l.unlock();
            if (stopped) {
                mCond.notify_all();
            }
        }
    }

    bool waitItem(std::shared_ptr<Frame> *frame) {
        while(true) {
            std::unique_lock<std::mutex> l(mMutex);
            if (!mDrain && mStopped) {
                // stop without consuming the queue.
                return false;
            }
            if (!mQueue.empty()) {
                *frame = mQueue.front();
                mQueue.pop();
                return true;
            } else if (mStopped) {
                // stop after consuming the queue.
                return false;
            }
            mCond.wait(l);
        }
    }
};

} // namespace anonymous

class GraphicsTrackerTest : public ::testing::Test {
public:
    const uint64_t kTestUsageFlag = GRALLOC_USAGE_SW_WRITE_OFTEN;

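    // Rendering-thread body: pops frames from |queue| and submits them to the
    // tracker via render(). Frames that cannot be rendered are deallocated and
    // counted as discarded.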
    void queueBuffer(FrameQueue *queue) {
        while (true) {
            std::shared_ptr<Frame> frame;
            if (!queue->waitItem(&frame)) {
                break;
            }
            uint64_t bid;
            if (__builtin_available(android __ANDROID_API_T__, *)) {
                if (AHardwareBuffer_getId(frame->buffer_, &bid) !=
                        android::NO_ERROR) {
                    break;
                }
            } else {
                break;
            }
            android::status_t ret = frame->fence_->wait(-1);
            if (ret != android::NO_ERROR) {
                mTracker->deallocate(bid, frame->fence_);
                mBqStat->mDiscarded++;
                continue;
            }

            std::shared_ptr<C2GraphicBlock> blk =
                    _C2BlockFactory::CreateGraphicBlock(frame->buffer_);
            if (!blk) {
                mTracker->deallocate(bid, Fence::NO_FENCE);
                mBqStat->mDiscarded++;
                continue;
            }
            IGraphicBufferProducer::QueueBufferInput input(
                    0, false,
                    HAL_DATASPACE_UNKNOWN, android::Rect(0, 0, 1, 1),
                    NATIVE_WINDOW_SCALING_MODE_FREEZE, 0, Fence::NO_FENCE);
            IGraphicBufferProducer::QueueBufferOutput output{};
            c2_status_t res = mTracker->render(
                    blk->share(C2Rect(1, 1), C2Fence()),
                    input, &output);
            if (res != C2_OK) {
                mTracker->deallocate(bid, Fence::NO_FENCE);
                mBqStat->mDiscarded++;
                continue;
            }
            if (output.bufferReplaced) {
                mBqStat->mDropped++;
            }
            mBqStat->mQueued++;
        }
    }

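    // Sleeps |us| microseconds and then stops the tracker; run on a separate
    // thread to unblock a pending wait.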
    void stopTrackerAfterUs(int us) {
        ::usleep(us);
        mTracker->stop();
    }

protected:
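    // Creates the GraphicsTracker under test and a BufferQueue
    // producer/consumer pair.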
    bool init(int maxDequeueCount) {
        mTracker = GraphicsTracker::CreateGraphicsTracker(maxDequeueCount);
        if (!mTracker) {
            return false;
        }
        BufferQueue::createBufferQueue(&mProducer, &mConsumer);
        if (!mProducer || !mConsumer) {
            return false;
        }
        return true;
    }
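
    // Connects the listeners to the BufferQueue, sets the maximum acquired
    // buffer count, and configures a zero dequeue timeout on the producer.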
    bool configure(sp<IProducerListener> producerListener,
                   sp<IConsumerListener> consumerListener,
                   int maxAcquiredCount = 1, bool controlledByApp = true) {
        if (mConsumer->consumerConnect(
                consumerListener, controlledByApp) != ::android::NO_ERROR) {
            return false;
        }
        if (mConsumer->setMaxAcquiredBufferCount(maxAcquiredCount) != ::android::NO_ERROR) {
            return false;
        }
        IGraphicBufferProducer::QueueBufferOutput qbo{};
        if (mProducer->connect(producerListener,
                          NATIVE_WINDOW_API_MEDIA, true, &qbo) != ::android::NO_ERROR) {
            return false;
        }
        if (mProducer->setDequeueTimeout(0) != ::android::NO_ERROR) {
            return false;
        }
        return true;
    }

    virtual void TearDown() override {
        mBqStat->log();
        mBqStat->clear();

        if (mTracker) {
            mTracker->stop();
            mTracker.reset();
        }
        if (mProducer) {
            mProducer->disconnect(NATIVE_WINDOW_API_MEDIA);
        }
        mProducer.clear();
        mConsumer.clear();
    }

protected:
    std::shared_ptr<BqStatistics> mBqStat = std::make_shared<BqStatistics>();
    sp<IGraphicBufferProducer> mProducer;
    sp<IGraphicBufferConsumer> mConsumer;
    std::shared_ptr<GraphicsTracker> mTracker;
};

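// Allocating up to maxDequeueCount buffers should succeed and decrement the
// dequeueable count; one more allocation must fail with C2_BLOCKING.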
TEST_F(GraphicsTrackerTest, AllocateAndBlockedTest) {
    uint32_t generation = 1;
    const int maxDequeueCount = 10;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new DummyConsumerListener()));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
    c2_status_t ret = mTracker->configureGraphics(mProducer, generation);
    ASSERT_EQ(C2_OK, ret);
    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());

    AHardwareBuffer *buf;
    sp<Fence> fence;
    uint64_t bid;

    // Allocate and check dequeueable
    if (__builtin_available(android __ANDROID_API_T__, *)) {
        for (int i = 0; i < maxDequeueCount; ++i) {
            ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
            ASSERT_EQ(C2_OK, ret);
            mBqStat->mDequeued++;
            ASSERT_EQ(maxDequeueCount - (i + 1), mTracker->getCurDequeueable());
            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bid));
            ALOGD("alloced : bufferId: %llu", (unsigned long long)bid);
            AHardwareBuffer_release(buf);
        }
    } else {
        GTEST_SKIP();
    }

    // Allocate should be blocked
    ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
    ALOGD("alloc : err(%d, %d)", ret, C2_BLOCKING);
    ASSERT_EQ(C2_BLOCKING, ret);
    mBqStat->mBlocked++;
    ASSERT_EQ(0, mTracker->getCurDequeueable());
}

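// Deallocating each outstanding buffer should raise the dequeueable count back
// toward maxDequeueCount one step at a time.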
TEST_F(GraphicsTrackerTest, AllocateAndDeallocateTest) {
    uint32_t generation = 1;
    const int maxDequeueCount = 10;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new DummyConsumerListener()));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
    c2_status_t ret = mTracker->configureGraphics(mProducer, generation);
    ASSERT_EQ(C2_OK, ret);

    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
    AHardwareBuffer *buf;
    sp<Fence> fence;
    uint64_t bid;
    std::vector<uint64_t> bids;

    // Allocate and store buffer id
    if (__builtin_available(android __ANDROID_API_T__, *)) {
        for (int i = 0; i < maxDequeueCount; ++i) {
            ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
            ASSERT_EQ(C2_OK, ret);
            mBqStat->mDequeued++;
            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bid));
            bids.push_back(bid);
            ALOGD("alloced : bufferId: %llu", (unsigned long long)bid);
            AHardwareBuffer_release(buf);
        }
    } else {
        GTEST_SKIP();
    }

    // Deallocate and check dequeueable
    for (int i = 0; i < maxDequeueCount; ++i) {
        ALOGD("dealloc : bufferId: %llu", (unsigned long long)bids[i]);
        ret = mTracker->deallocate(bids[i], Fence::NO_FENCE);
        ASSERT_EQ(C2_OK, ret);
        ASSERT_EQ(i + 1, mTracker->getCurDequeueable());
        mBqStat->mDiscarded++;
    }
}

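// Queuing two frames without consuming should drop the older one; acquiring
// and releasing the remaining frame then frees both dequeue slots.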
TEST_F(GraphicsTrackerTest, DropAndReleaseTest) {
    uint32_t generation = 1;
    const int maxDequeueCount = 10;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new DummyConsumerListener()));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
    c2_status_t ret = mTracker->configureGraphics(mProducer, generation);
    ASSERT_EQ(C2_OK, ret);

    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());

    FrameQueue frameQueue;
    std::thread queueThread(&GraphicsTrackerTest::queueBuffer, this, &frameQueue);
    AHardwareBuffer *buf1, *buf2;
    sp<Fence> fence1, fence2;

    ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf1, &fence1);
    ASSERT_EQ(C2_OK, ret);
    mBqStat->mDequeued++;
    ASSERT_EQ(maxDequeueCount - 1, mTracker->getCurDequeueable());

    ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf2, &fence2);
    ASSERT_EQ(C2_OK, ret);
    mBqStat->mDequeued++;
    ASSERT_EQ(maxDequeueCount - 2, mTracker->getCurDequeueable());

    // Queue two buffers without consuming, one should be dropped
    ASSERT_TRUE(frameQueue.queueItem(buf1, fence1));
    ASSERT_TRUE(frameQueue.queueItem(buf2, fence2));

    frameQueue.stop(true);
    if (queueThread.joinable()) {
        queueThread.join();
    }

    ASSERT_EQ(maxDequeueCount - 1, mTracker->getCurDequeueable());

    // Consume one buffer and release
    BufferItem item;
    ASSERT_EQ(OK, mConsumer->acquireBuffer(&item, 0));
    ASSERT_EQ(OK, mConsumer->releaseBuffer(item.mSlot, item.mFrameNumber,
            EGL_NO_DISPLAY, EGL_NO_SYNC_KHR, item.mFence));
    // Nothing to consume
    ASSERT_NE(OK, mConsumer->acquireBuffer(&item, 0));

    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
    ASSERT_EQ(1, mBqStat->mReleased);
    ASSERT_EQ(1, mBqStat->mDropped);
}

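// Allocates and renders maxNumAlloc frames while the consumer drains them at
// ~30fps. Every dequeued buffer must end up queued and then either released
// or dropped.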
TEST_F(GraphicsTrackerTest, RenderTest) {
    uint32_t generation = 1;
    const int maxDequeueCount = 10;
    const int maxNumAlloc = 20;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new TestConsumerListener(mConsumer), 1, false));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));

    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));

    int waitFd = -1;
    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);


    FrameQueue frameQueue;
    std::thread queueThread(&GraphicsTrackerTest::queueBuffer, this, &frameQueue);

    int numAlloc = 0;

    while (numAlloc < maxNumAlloc) {
        AHardwareBuffer *buf;
        sp<Fence> fence;
        c2_status_t ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
        if (ret == C2_BLOCKING) {
            mBqStat->mBlocked++;
            c2_status_t waitRes = waitFence.wait(3000000000);
            if (waitRes == C2_TIMED_OUT || waitRes == C2_OK) {
                continue;
            }
            ALOGE("alloc wait failed: c2_err(%d)", waitRes);
            break;
        }
        if (ret != C2_OK) {
            ALOGE("alloc error: c2_err(%d)", ret);
            break;
        }
        mBqStat->mDequeued++;
        if (!frameQueue.queueItem(buf, fence)) {
            ALOGE("queue to render failed");
            break;
        }
        ++numAlloc;
    }

    frameQueue.stop(true);
    // Wait more than enough time(1 sec) to render all queued frames for sure.
    ::usleep(1000000);

    if (queueThread.joinable()) {
        queueThread.join();
    }
    ASSERT_EQ(numAlloc, maxNumAlloc);
    ASSERT_EQ(numAlloc, mBqStat->mDequeued);
    ASSERT_EQ(mBqStat->mDequeued, mBqStat->mQueued);
    ASSERT_EQ(mBqStat->mDequeued, mBqStat->mReleased + mBqStat->mDropped);
}

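// With every dequeue slot in use, the waitable fd should time out; once the
// tracker is stopped, the pending wait must fail with C2_BAD_STATE.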
TEST_F(GraphicsTrackerTest, StopAndWaitTest) {
    uint32_t generation = 1;
    const int maxDequeueCount = 2;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new TestConsumerListener(mConsumer), 1, false));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));

    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));

    int waitFd = -1;
    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);

    AHardwareBuffer *buf1, *buf2;
    sp<Fence> fence;

    ASSERT_EQ(C2_OK, mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf1, &fence));
    mBqStat->mDequeued++;
    AHardwareBuffer_release(buf1);

    ASSERT_EQ(C2_OK, mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf2, &fence));
    mBqStat->mDequeued++;
    AHardwareBuffer_release(buf2);

    ASSERT_EQ(0, mTracker->getCurDequeueable());
    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(3000000000));

    std::thread stopThread(&GraphicsTrackerTest::stopTrackerAfterUs, this, 500000);
    ASSERT_EQ(C2_BAD_STATE, waitFence.wait(3000000000));

    if (stopThread.joinable()) {
        stopThread.join();
    }
}

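// Switches to a new Surface (BufferQueue) with a bumped generation in the
// middle of streaming and verifies that frames allocated against the old
// surface can still be rendered and that the dequeueable count recovers.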
TEST_F(GraphicsTrackerTest, SurfaceChangeTest) {
    uint32_t generation = 1;
    const int maxDequeueCount = 10;

    const int maxNumAlloc = 20;

    const int firstPassAlloc = 12;
    const int firstPassRender = 8;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new TestConsumerListener(mConsumer), 1, false));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));

    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));

    int waitFd = -1;
    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);

    AHardwareBuffer *bufs[maxNumAlloc];
    sp<Fence> fences[maxNumAlloc];

    FrameQueue frameQueue;
    std::thread queueThread(&GraphicsTrackerTest::queueBuffer, this, &frameQueue);
    int numAlloc = 0;

    for (int i = 0; i < firstPassRender; ++i) {
        ASSERT_EQ(C2_OK, mTracker->allocate(
                0, 0, 0, kTestUsageFlag, &bufs[i], &fences[i]));
        mBqStat->mDequeued++;
        numAlloc++;
        ASSERT_EQ(true, frameQueue.queueItem(bufs[i], fences[i]));
    }

    while (numAlloc < firstPassAlloc) {
        c2_status_t ret = mTracker->allocate(
                0, 0, 0, kTestUsageFlag, &bufs[numAlloc], &fences[numAlloc]);
        if (ret == C2_BLOCKING) {
            mBqStat->mBlocked++;
            c2_status_t waitRes = waitFence.wait(3000000000);
            if (waitRes == C2_TIMED_OUT || waitRes == C2_OK) {
                continue;
            }
            ALOGE("alloc wait failed: c2_err(%d)", waitRes);
            break;
        }
        if (ret != C2_OK) {
            ALOGE("alloc error: c2_err(%d)", ret);
            break;
        }
        mBqStat->mDequeued++;
        numAlloc++;
    }
    ASSERT_EQ(numAlloc, firstPassAlloc);

    // switching surface
    sp<IGraphicBufferProducer> oldProducer = mProducer;
    sp<IGraphicBufferConsumer> oldConsumer = mConsumer;
    mProducer.clear();
    mConsumer.clear();
    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
    ASSERT_TRUE((bool)mProducer && (bool)mConsumer);

    generation += 1;

    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new TestConsumerListener(mConsumer), 1, false));
    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));

    ASSERT_EQ(OK, oldProducer->disconnect(NATIVE_WINDOW_API_MEDIA));
    oldProducer.clear();
    oldConsumer.clear();

    for (int i = firstPassRender ; i < firstPassAlloc; ++i) {
        ASSERT_EQ(true, frameQueue.queueItem(bufs[i], fences[i]));
    }

    while (numAlloc < maxNumAlloc) {
        AHardwareBuffer *buf;
        sp<Fence> fence;
        c2_status_t ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
        if (ret == C2_BLOCKING) {
            mBqStat->mBlocked++;
            c2_status_t waitRes = waitFence.wait(3000000000);
            if (waitRes == C2_TIMED_OUT || waitRes == C2_OK) {
                continue;
            }
            ALOGE("alloc wait failed: c2_err(%d)", waitRes);
            break;
        }
        if (ret != C2_OK) {
            ALOGE("alloc error: c2_err(%d)", ret);
            break;
        }
        mBqStat->mDequeued++;
        if (!frameQueue.queueItem(buf, fence)) {
            ALOGE("queue to render failed");
            break;
        }
        ++numAlloc;
    }

    ASSERT_EQ(numAlloc, maxNumAlloc);

    frameQueue.stop(true);
    // Wait more than enough time(1 sec) to render all queued frames for sure.
    ::usleep(1000000);

    if (queueThread.joinable()) {
        queueThread.join();
    }
    // mReleased should not be checked. IProducerListener::onBufferReleased()
    // from the previous Surface could be missing after a new Surface was
    // configured. Instead check # of dequeueable and queueBuffer() calls.
    ASSERT_EQ(numAlloc, mBqStat->mQueued);
    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());

    for (int i = 0; i < maxDequeueCount; ++i) {
        AHardwareBuffer *buf;
        sp<Fence> fence;

        ASSERT_EQ(C2_OK, mTracker->allocate(
                0, 0, 0, kTestUsageFlag, &buf, &fence));
        AHardwareBuffer_release(buf);
        mBqStat->mDequeued++;
        numAlloc++;
    }
    ASSERT_EQ(C2_BLOCKING, mTracker->allocate(
            0, 0, 0, kTestUsageFlag, &bufs[0], &fences[0]));
}

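// After the max dequeue count is raised, the newly added slots (plus any freed
// by deallocation) become allocatable until the new limit is reached.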
TEST_F(GraphicsTrackerTest, maxDequeueIncreaseTest) {
    uint32_t generation = 1;
    int maxDequeueCount = 10;
    int dequeueIncrease = 4;

    int numAlloc = 0;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new TestConsumerListener(mConsumer), 1, false));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));

    int waitFd = -1;
    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);

    AHardwareBuffer *buf;
    sp<Fence> fence;
    uint64_t bids[maxDequeueCount];
    if (__builtin_available(android __ANDROID_API_T__, *)) {
        for (int i = 0; i < maxDequeueCount; ++i) {
            ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
            ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bids[i]));
            AHardwareBuffer_release(buf);
            mBqStat->mDequeued++;
            numAlloc++;
        }
    } else {
        GTEST_SKIP();
    }
    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));

    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[0], Fence::NO_FENCE));
    mBqStat->mDiscarded++;

    maxDequeueCount += dequeueIncrease;
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
    for (int i = 0; i < dequeueIncrease + 1; ++i) {
        ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
        ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
        AHardwareBuffer_release(buf);
        mBqStat->mDequeued++;
        numAlloc++;
    }
    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));

    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[1], Fence::NO_FENCE));
    mBqStat->mDiscarded++;

    maxDequeueCount += dequeueIncrease;
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
    for (int i = 0; i < dequeueIncrease + 1; ++i) {
        ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
        ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
        AHardwareBuffer_release(buf);
        mBqStat->mDequeued++;
        numAlloc++;
    }
    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
}

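// After the max dequeue count is lowered, allocation stays blocked until
// enough outstanding buffers are deallocated to drop under the new limit.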
TEST_F(GraphicsTrackerTest, maxDequeueDecreaseTest) {
    uint32_t generation = 1;
    int maxDequeueCount = 12;
    int dequeueDecrease = 4;

    int numAlloc = 0;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new TestConsumerListener(mConsumer), 1, false));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));

    int waitFd = -1;
    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);

    AHardwareBuffer *buf;
    sp<Fence> fence;
    uint64_t bids[maxDequeueCount];
    if (__builtin_available(android __ANDROID_API_T__, *)) {
        for (int i = 0; i < maxDequeueCount; ++i) {
            ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
            ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bids[i]));
            AHardwareBuffer_release(buf);
            mBqStat->mDequeued++;
            numAlloc++;
        }
    } else {
        GTEST_SKIP();
    }
    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));

    int discardIdx = 0;
    maxDequeueCount -= dequeueDecrease;
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
    for (int i = 0; i < dequeueDecrease + 1; ++i) {
        ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
        ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
        ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
        mBqStat->mDiscarded++;
    }
    ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
    ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
    mBqStat->mDequeued++;

    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
    mBqStat->mDiscarded++;
    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
    mBqStat->mDiscarded++;
    maxDequeueCount -= dequeueDecrease;

    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
    for (int i = 0; i < dequeueDecrease - 1; ++i) {
        ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
        ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
        ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
        mBqStat->mDiscarded++;
    }
    ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
    ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
    mBqStat->mDequeued++;
}