/*
 * Copyright (C) 2012 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 
#ifndef __ANDROID_HAL_CAMERA2_TESTS_STREAM_FIXTURE__
#define __ANDROID_HAL_CAMERA2_TESTS_STREAM_FIXTURE__

#include <gtest/gtest.h>
#include <iostream>
#include <fstream>

#include <gui/CpuConsumer.h>
#include <gui/Surface.h>
#include <utils/Condition.h>
#include <utils/Mutex.h>
#include <system/camera_metadata.h>

#include "CameraModuleFixture.h"
#include "TestExtensions.h"

// Round x up to the next multiple of (mask + 1); mask must be 2^n - 1.
#define ALIGN(x, mask) ( ((x) + (mask) - 1) & ~((mask) - 1) )

namespace android {
namespace camera2 {
namespace tests {

// Format specifier for picking the best format for CPU reading the given device
// version
#define CAMERA_STREAM_AUTO_CPU_FORMAT (-1)

struct CameraStreamParams;

void PrintTo(const CameraStreamParams& p, ::std::ostream* os);
46 
// Per-test stream configuration, used as a gtest value parameter.
struct CameraStreamParams {
    int mFormat;    // stream pixel format: a HAL_PIXEL_FORMAT_* value, or
                    // CAMERA_STREAM_AUTO_CPU_FORMAT to pick a CPU-readable one
    int mHeapCount; // max number of buffers the CpuConsumer may acquire at once

};
52 
// Stream-insertion operator; delegates to the gtest-style PrintTo() helper so
// logging and test-parameter printing share one formatting path.
inline ::std::ostream& operator<<(::std::ostream& os, const CameraStreamParams &p) {
    PrintTo(p, &os);
    return os;
}
57 
PrintTo(const CameraStreamParams & p,::std::ostream * os)58 inline void PrintTo(const CameraStreamParams& p, ::std::ostream* os) {
59     char fmt[100];
60     camera_metadata_enum_snprint(
61         ANDROID_SCALER_AVAILABLE_FORMATS, p.mFormat, fmt, sizeof(fmt));
62 
63     *os <<  "{ ";
64     *os <<  "Format: 0x"  << std::hex << p.mFormat    << ", ";
65     *os <<  "Format name: " << fmt << ", ";
66     *os <<  "HeapCount: " <<             p.mHeapCount;
67     *os << " }";
68 }
69 
70 class CameraStreamFixture
71     : public CameraModuleFixture</*InfoQuirk*/true> {
72 
73 public:
CameraStreamFixture(CameraStreamParams p)74     CameraStreamFixture(CameraStreamParams p)
75     : CameraModuleFixture(TestSettings::DeviceId()) {
76         TEST_EXTENSION_FORKING_CONSTRUCTOR;
77 
78         mParam = p;
79 
80         SetUp();
81     }
82 
~CameraStreamFixture()83     ~CameraStreamFixture() {
84         TEST_EXTENSION_FORKING_DESTRUCTOR;
85 
86         TearDown();
87     }
88 
89 private:
90 
SetUp()91     void SetUp() {
92         TEST_EXTENSION_FORKING_SET_UP;
93 
94         CameraModuleFixture::SetUp();
95 
96         sp<CameraDeviceBase> device = mDevice;
97 
98         /* use an arbitrary w,h */
99         if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
100             const int tag = ANDROID_SCALER_AVAILABLE_PROCESSED_SIZES;
101 
102             const CameraMetadata& staticInfo = device->info();
103             camera_metadata_ro_entry entry = staticInfo.find(tag);
104             ASSERT_NE(0u, entry.count)
105                 << "Missing tag android.scaler.availableProcessedSizes";
106 
107             ASSERT_LE(2u, entry.count);
108             /* this seems like it would always be the smallest w,h
109                but we actually make no contract that it's sorted asc */
110             mWidth = entry.data.i32[0];
111             mHeight = entry.data.i32[1];
112         } else {
113             buildOutputResolutions();
114             const int32_t *implDefResolutions = NULL;
115             size_t   implDefResolutionsCount;
116 
117             int format = HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED;
118 
119             getResolutionList(format,
120                     &implDefResolutions, &implDefResolutionsCount);
121             ASSERT_NE(0u, implDefResolutionsCount)
122                 << "Missing implementation defined sizes";
123             mWidth = implDefResolutions[0];
124             mHeight = implDefResolutions[1];
125         }
126     }
TearDown()127     void TearDown() {
128         TEST_EXTENSION_FORKING_TEAR_DOWN;
129 
130         // important: shut down HAL before releasing streams
131         CameraModuleFixture::TearDown();
132 
133         deleteOutputResolutions();
134         mSurface.clear();
135         mCpuConsumer.clear();
136         mFrameListener.clear();
137     }
138 
139 protected:
140 
getMinFrameDurationFor(int32_t format,int32_t width,int32_t height)141     int64_t getMinFrameDurationFor(int32_t format, int32_t width, int32_t height) {
142         int64_t minFrameDuration = -1L;
143         const int tag = ANDROID_SCALER_AVAILABLE_MIN_FRAME_DURATIONS;
144         sp<CameraDeviceBase> device = mDevice;
145         const CameraMetadata& staticInfo = device->info();
146         camera_metadata_ro_entry_t availableMinDurations = staticInfo.find(tag);
147         for (uint32_t i = 0; i < availableMinDurations.count; i += 4) {
148             if (format == availableMinDurations.data.i64[i] &&
149                     width == availableMinDurations.data.i64[i + 1] &&
150                     height == availableMinDurations.data.i64[i + 2]) {
151                 minFrameDuration = availableMinDurations.data.i64[i + 3];
152                 break;
153             }
154         }
155         return minFrameDuration;
156     }
157 
buildOutputResolutions()158     void buildOutputResolutions() {
159         if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
160             return;
161         }
162         if (mOutputResolutions.isEmpty()) {
163             const int tag = ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS;
164             const CameraMetadata& staticInfo = mDevice->info();
165             camera_metadata_ro_entry_t availableStrmConfigs = staticInfo.find(tag);
166             ASSERT_EQ(0u, availableStrmConfigs.count % 4);
167             for (uint32_t i = 0; i < availableStrmConfigs.count; i += 4) {
168                 int32_t format = availableStrmConfigs.data.i32[i];
169                 int32_t width = availableStrmConfigs.data.i32[i + 1];
170                 int32_t height = availableStrmConfigs.data.i32[i + 2];
171                 int32_t inOrOut = availableStrmConfigs.data.i32[i + 3];
172                 if (inOrOut == ANDROID_SCALER_AVAILABLE_STREAM_CONFIGURATIONS_OUTPUT) {
173                     int index = mOutputResolutions.indexOfKey(format);
174                     if (index < 0) {
175                         index = mOutputResolutions.add(format, new Vector<int32_t>());
176                         ASSERT_TRUE(index >= 0);
177                     }
178                     Vector<int32_t> *resolutions = mOutputResolutions.editValueAt(index);
179                     resolutions->add(width);
180                     resolutions->add(height);
181                 }
182             }
183         }
184     }
185 
getResolutionList(int32_t format,const int32_t ** list,size_t * count)186     void getResolutionList(int32_t format,
187             const int32_t **list,
188             size_t *count) {
189         ALOGV("Getting resolutions for format %x", format);
190         if (getDeviceVersion() < CAMERA_DEVICE_API_VERSION_3_2) {
191             return;
192         }
193         int index = mOutputResolutions.indexOfKey(format);
194         ASSERT_TRUE(index >= 0);
195         Vector<int32_t>* resolutions = mOutputResolutions.valueAt(index);
196         *list = resolutions->array();
197         *count = resolutions->size();
198     }
199 
deleteOutputResolutions()200     void deleteOutputResolutions() {
201         for (uint32_t i = 0; i < mOutputResolutions.size(); i++) {
202             Vector<int32_t>* resolutions = mOutputResolutions.editValueAt(i);
203             delete resolutions;
204         }
205         mOutputResolutions.clear();
206     }
207 
208     struct FrameListener : public ConsumerBase::FrameAvailableListener {
209 
FrameListenerFrameListener210         FrameListener() {
211             mPendingFrames = 0;
212         }
213 
214         // CpuConsumer::FrameAvailableListener implementation
onFrameAvailableFrameListener215         virtual void onFrameAvailable(const BufferItem& /* item */) {
216             ALOGV("Frame now available (start)");
217 
218             Mutex::Autolock lock(mMutex);
219             mPendingFrames++;
220             mCondition.signal();
221 
222             ALOGV("Frame now available (end)");
223         }
224 
waitForFrameFrameListener225         status_t waitForFrame(nsecs_t timeout) {
226             status_t res;
227             Mutex::Autolock lock(mMutex);
228             while (mPendingFrames == 0) {
229                 res = mCondition.waitRelative(mMutex, timeout);
230                 if (res != OK) return res;
231             }
232             mPendingFrames--;
233             return OK;
234         }
235 
236     private:
237         Mutex mMutex;
238         Condition mCondition;
239         int mPendingFrames;
240     };
241 
CreateStream()242     void CreateStream() {
243         sp<CameraDeviceBase> device = mDevice;
244         CameraStreamParams p = mParam;
245 
246         sp<IGraphicBufferProducer> producer;
247         sp<IGraphicBufferConsumer> consumer;
248         BufferQueue::createBufferQueue(&producer, &consumer);
249         mCpuConsumer = new CpuConsumer(consumer, p.mHeapCount);
250         mCpuConsumer->setName(String8("CameraStreamTest::mCpuConsumer"));
251 
252         mSurface = new Surface(producer);
253 
254         int format = MapAutoFormat(p.mFormat);
255 
256         ASSERT_EQ(OK,
257             device->createStream(mSurface,
258                 mWidth, mHeight, format,
259                 HAL_DATASPACE_UNKNOWN,
260                 CAMERA3_STREAM_ROTATION_0,
261                 &mStreamId));
262 
263         ASSERT_NE(-1, mStreamId);
264 
265         // do not make 'this' a FrameListener or the lifetime policy will clash
266         mFrameListener = new FrameListener();
267         mCpuConsumer->setFrameAvailableListener(mFrameListener);
268     }
269 
DeleteStream()270     void DeleteStream() {
271         ASSERT_EQ(OK, mDevice->deleteStream(mStreamId));
272     }
273 
MapAutoFormat(int format)274     int MapAutoFormat(int format) {
275         if (format == CAMERA_STREAM_AUTO_CPU_FORMAT) {
276             if (getDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_0) {
277                 format = HAL_PIXEL_FORMAT_YCbCr_420_888;
278             } else {
279                 format = HAL_PIXEL_FORMAT_YCrCb_420_SP;
280             }
281         }
282         return format;
283     }
284 
DumpYuvToFile(const String8 & fileName,const CpuConsumer::LockedBuffer & img)285     void DumpYuvToFile(const String8 &fileName, const CpuConsumer::LockedBuffer &img) {
286         uint8_t *dataCb, *dataCr;
287         uint32_t stride;
288         uint32_t chromaStride;
289         uint32_t chromaStep;
290 
291         switch (img.format) {
292             case HAL_PIXEL_FORMAT_YCbCr_420_888:
293                 stride = img.stride;
294                 chromaStride = img.chromaStride;
295                 chromaStep = img.chromaStep;
296                 dataCb = img.dataCb;
297                 dataCr = img.dataCr;
298                 break;
299             case HAL_PIXEL_FORMAT_YCrCb_420_SP:
300                 stride = img.width;
301                 chromaStride = img.width;
302                 chromaStep = 2;
303                 dataCr = img.data + img.width * img.height;
304                 dataCb = dataCr + 1;
305                 break;
306             case HAL_PIXEL_FORMAT_YV12:
307                 stride = img.stride;
308                 chromaStride = ALIGN(img.width / 2, 16);
309                 chromaStep = 1;
310                 dataCr = img.data + img.stride * img.height;
311                 dataCb = dataCr + chromaStride * img.height/2;
312                 break;
313             default:
314                 ALOGE("Unknown format %d, not dumping", img.format);
315                 return;
316         }
317 
318         // Write Y
319         FILE *yuvFile = fopen(fileName.string(), "w");
320 
321         size_t bytes;
322 
323         for (size_t y = 0; y < img.height; ++y) {
324             bytes = fwrite(
325                 reinterpret_cast<const char*>(img.data + stride * y),
326                 1, img.width, yuvFile);
327             if (bytes != img.width) {
328                 ALOGE("Unable to write to file %s", fileName.string());
329                 fclose(yuvFile);
330                 return;
331             }
332         }
333 
334         // Write Cb/Cr
335         uint8_t *src = dataCb;
336         for (int c = 0; c < 2; ++c) {
337             for (size_t y = 0; y < img.height / 2; ++y) {
338                 uint8_t *px = src + y * chromaStride;
339                 if (chromaStep != 1) {
340                     for (size_t x = 0; x < img.width / 2; ++x) {
341                         fputc(*px, yuvFile);
342                         px += chromaStep;
343                     }
344                 } else {
345                     bytes = fwrite(reinterpret_cast<const char*>(px),
346                             1, img.width / 2, yuvFile);
347                     if (bytes != img.width / 2) {
348                         ALOGE("Unable to write to file %s", fileName.string());
349                         fclose(yuvFile);
350                         return;
351                     }
352                 }
353             }
354             src = dataCr;
355         }
356         fclose(yuvFile);
357     }
358 
359     int mWidth;
360     int mHeight;
361 
362     int mStreamId;
363 
364     android::sp<FrameListener>       mFrameListener;
365     android::sp<CpuConsumer>         mCpuConsumer;
366     android::sp<Surface>             mSurface;
367     KeyedVector<int32_t, Vector<int32_t>* > mOutputResolutions;
368 
369 private:
370     CameraStreamParams mParam;
371 };
372 
} // namespace tests
} // namespace camera2
} // namespace android

#endif
378